{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1546,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 3.6466701238755213,
      "learning_rate": 2.1276595744680852e-07,
      "loss": 0.348,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.139185622660457,
      "learning_rate": 4.2553191489361704e-07,
      "loss": 0.3305,
      "step": 2
    },
    {
      "epoch": 0.0,
      "grad_norm": 8.881200735394447,
      "learning_rate": 6.382978723404255e-07,
      "loss": 0.3234,
      "step": 3
    },
    {
      "epoch": 0.0,
      "grad_norm": 3.7485825244709936,
      "learning_rate": 8.510638297872341e-07,
      "loss": 0.3036,
      "step": 4
    },
    {
      "epoch": 0.0,
      "grad_norm": 3.870091850769131,
      "learning_rate": 1.0638297872340427e-06,
      "loss": 0.3082,
      "step": 5
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.147534101913999,
      "learning_rate": 1.276595744680851e-06,
      "loss": 0.2976,
      "step": 6
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.0450582796416175,
      "learning_rate": 1.4893617021276596e-06,
      "loss": 0.3194,
      "step": 7
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.9648702987949624,
      "learning_rate": 1.7021276595744682e-06,
      "loss": 0.3229,
      "step": 8
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.05272089122871,
      "learning_rate": 1.9148936170212767e-06,
      "loss": 0.2646,
      "step": 9
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.9192913531636706,
      "learning_rate": 2.1276595744680853e-06,
      "loss": 0.2736,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.967249286697924,
      "learning_rate": 2.340425531914894e-06,
      "loss": 0.2925,
      "step": 11
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.138517274675191,
      "learning_rate": 2.553191489361702e-06,
      "loss": 0.2967,
      "step": 12
    },
    {
      "epoch": 0.01,
      "grad_norm": 8.33936595379889,
      "learning_rate": 2.765957446808511e-06,
      "loss": 0.319,
      "step": 13
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.555705158505097,
      "learning_rate": 2.978723404255319e-06,
      "loss": 0.2793,
      "step": 14
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.181269031597184,
      "learning_rate": 3.191489361702128e-06,
      "loss": 0.2417,
      "step": 15
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.064313198312008,
      "learning_rate": 3.4042553191489363e-06,
      "loss": 0.2061,
      "step": 16
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.61672727351488,
      "learning_rate": 3.6170212765957453e-06,
      "loss": 0.2546,
      "step": 17
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.7903212709494416,
      "learning_rate": 3.8297872340425535e-06,
      "loss": 0.2521,
      "step": 18
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.7762556046042675,
      "learning_rate": 4.042553191489362e-06,
      "loss": 0.1738,
      "step": 19
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.6164798733780423,
      "learning_rate": 4.255319148936171e-06,
      "loss": 0.1681,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.794571211225896,
      "learning_rate": 4.468085106382979e-06,
      "loss": 0.1864,
      "step": 21
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.1344038725423276,
      "learning_rate": 4.680851063829788e-06,
      "loss": 0.1685,
      "step": 22
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.6772778738316547,
      "learning_rate": 4.893617021276596e-06,
      "loss": 0.1733,
      "step": 23
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.5864900115307945,
      "learning_rate": 5.106382978723404e-06,
      "loss": 0.1614,
      "step": 24
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.1829091283205524,
      "learning_rate": 5.319148936170213e-06,
      "loss": 0.1749,
      "step": 25
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.627918949690976,
      "learning_rate": 5.531914893617022e-06,
      "loss": 0.156,
      "step": 26
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.1489057985777893,
      "learning_rate": 5.744680851063831e-06,
      "loss": 0.1511,
      "step": 27
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.4901325902309766,
      "learning_rate": 5.957446808510638e-06,
      "loss": 0.1622,
      "step": 28
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.5737914145996683,
      "learning_rate": 6.170212765957447e-06,
      "loss": 0.1272,
      "step": 29
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.6639517888902864,
      "learning_rate": 6.382978723404256e-06,
      "loss": 0.129,
      "step": 30
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.9458157448896816,
      "learning_rate": 6.595744680851064e-06,
      "loss": 0.1604,
      "step": 31
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.0682564766093923,
      "learning_rate": 6.808510638297873e-06,
      "loss": 0.1413,
      "step": 32
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.4160767147719295,
      "learning_rate": 7.021276595744682e-06,
      "loss": 0.1714,
      "step": 33
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.9442874642653694,
      "learning_rate": 7.234042553191491e-06,
      "loss": 0.1553,
      "step": 34
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.6201074618551132,
      "learning_rate": 7.446808510638298e-06,
      "loss": 0.1599,
      "step": 35
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.499964053871311,
      "learning_rate": 7.659574468085107e-06,
      "loss": 0.1375,
      "step": 36
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.392017059081343,
      "learning_rate": 7.872340425531916e-06,
      "loss": 0.119,
      "step": 37
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.3073895290345847,
      "learning_rate": 8.085106382978723e-06,
      "loss": 0.1497,
      "step": 38
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.0072381649457016,
      "learning_rate": 8.297872340425532e-06,
      "loss": 0.1155,
      "step": 39
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.1409222987029075,
      "learning_rate": 8.510638297872341e-06,
      "loss": 0.1353,
      "step": 40
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.339746183858402,
      "learning_rate": 8.72340425531915e-06,
      "loss": 0.1589,
      "step": 41
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.0962604851483464,
      "learning_rate": 8.936170212765958e-06,
      "loss": 0.1487,
      "step": 42
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.2204015267252832,
      "learning_rate": 9.148936170212767e-06,
      "loss": 0.1413,
      "step": 43
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.0308968159970804,
      "learning_rate": 9.361702127659576e-06,
      "loss": 0.12,
      "step": 44
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.120274606415836,
      "learning_rate": 9.574468085106385e-06,
      "loss": 0.1462,
      "step": 45
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.214971309457947,
      "learning_rate": 9.787234042553192e-06,
      "loss": 0.145,
      "step": 46
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.453479965060542,
      "learning_rate": 1e-05,
      "loss": 0.1249,
      "step": 47
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.9638463123214622,
      "learning_rate": 9.999989019140636e-06,
      "loss": 0.0844,
      "step": 48
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.0761585516779526,
      "learning_rate": 9.999956076610778e-06,
      "loss": 0.1446,
      "step": 49
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.061496741512052,
      "learning_rate": 9.999901172555116e-06,
      "loss": 0.1232,
      "step": 50
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.0735089733196803,
      "learning_rate": 9.999824307214812e-06,
      "loss": 0.1405,
      "step": 51
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.973897420839363,
      "learning_rate": 9.999725480927483e-06,
      "loss": 0.1145,
      "step": 52
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.633648585469745,
      "learning_rate": 9.99960469412721e-06,
      "loss": 0.1133,
      "step": 53
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.6460370470359849,
      "learning_rate": 9.999461947344526e-06,
      "loss": 0.114,
      "step": 54
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.9745205343016254,
      "learning_rate": 9.99929724120643e-06,
      "loss": 0.1496,
      "step": 55
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.0268455134518746,
      "learning_rate": 9.99911057643636e-06,
      "loss": 0.1067,
      "step": 56
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.9753338409612728,
      "learning_rate": 9.998901953854222e-06,
      "loss": 0.1423,
      "step": 57
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.8231456903833867,
      "learning_rate": 9.998671374376349e-06,
      "loss": 0.1145,
      "step": 58
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.4924704043661263,
      "learning_rate": 9.99841883901553e-06,
      "loss": 0.1435,
      "step": 59
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.9320494942976505,
      "learning_rate": 9.998144348880984e-06,
      "loss": 0.1188,
      "step": 60
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.599418714042963,
      "learning_rate": 9.99784790517837e-06,
      "loss": 0.0879,
      "step": 61
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.9477442918289953,
      "learning_rate": 9.997529509209768e-06,
      "loss": 0.1174,
      "step": 62
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.330125781434381,
      "learning_rate": 9.997189162373683e-06,
      "loss": 0.164,
      "step": 63
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.166128412915446,
      "learning_rate": 9.996826866165036e-06,
      "loss": 0.1146,
      "step": 64
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.8745142590127868,
      "learning_rate": 9.996442622175156e-06,
      "loss": 0.1443,
      "step": 65
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.355826899731574,
      "learning_rate": 9.996036432091773e-06,
      "loss": 0.0947,
      "step": 66
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.2179182480709434,
      "learning_rate": 9.995608297699018e-06,
      "loss": 0.1541,
      "step": 67
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.5146551784021653,
      "learning_rate": 9.9951582208774e-06,
      "loss": 0.1459,
      "step": 68
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.675961892975102,
      "learning_rate": 9.994686203603811e-06,
      "loss": 0.0971,
      "step": 69
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.8476255656557437,
      "learning_rate": 9.994192247951516e-06,
      "loss": 0.1003,
      "step": 70
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.014736730942368,
      "learning_rate": 9.993676356090139e-06,
      "loss": 0.1097,
      "step": 71
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.1036545470925554,
      "learning_rate": 9.993138530285649e-06,
      "loss": 0.1264,
      "step": 72
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.107461437774381,
      "learning_rate": 9.992578772900366e-06,
      "loss": 0.1379,
      "step": 73
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.0409530279397967,
      "learning_rate": 9.991997086392936e-06,
      "loss": 0.1051,
      "step": 74
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.7745745669427309,
      "learning_rate": 9.991393473318326e-06,
      "loss": 0.1139,
      "step": 75
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.5490723949036367,
      "learning_rate": 9.990767936327812e-06,
      "loss": 0.1517,
      "step": 76
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.195736654386886,
      "learning_rate": 9.990120478168968e-06,
      "loss": 0.1222,
      "step": 77
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.8776753484302193,
      "learning_rate": 9.98945110168565e-06,
      "loss": 0.1642,
      "step": 78
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.3857244336207146,
      "learning_rate": 9.988759809817995e-06,
      "loss": 0.0998,
      "step": 79
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.0118828036816483,
      "learning_rate": 9.98804660560239e-06,
      "loss": 0.1196,
      "step": 80
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.8691147410130577,
      "learning_rate": 9.987311492171474e-06,
      "loss": 0.1411,
      "step": 81
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.716478047398871,
      "learning_rate": 9.98655447275412e-06,
      "loss": 0.1183,
      "step": 82
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.241445397670684,
      "learning_rate": 9.985775550675415e-06,
      "loss": 0.0781,
      "step": 83
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.637285204874327,
      "learning_rate": 9.984974729356653e-06,
      "loss": 0.1116,
      "step": 84
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.7402339286961375,
      "learning_rate": 9.984152012315317e-06,
      "loss": 0.1117,
      "step": 85
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.088002718923483,
      "learning_rate": 9.983307403165063e-06,
      "loss": 0.1444,
      "step": 86
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.518789329085565,
      "learning_rate": 9.982440905615705e-06,
      "loss": 0.1158,
      "step": 87
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.2952247513792705,
      "learning_rate": 9.981552523473198e-06,
      "loss": 0.1189,
      "step": 88
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.50747528627161,
      "learning_rate": 9.980642260639621e-06,
      "loss": 0.1034,
      "step": 89
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.557146005325423,
      "learning_rate": 9.979710121113163e-06,
      "loss": 0.1167,
      "step": 90
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.948661547281757,
      "learning_rate": 9.978756108988098e-06,
      "loss": 0.1696,
      "step": 91
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.230926069527611,
      "learning_rate": 9.977780228454779e-06,
      "loss": 0.1652,
      "step": 92
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.9280522718280948,
      "learning_rate": 9.976782483799607e-06,
      "loss": 0.11,
      "step": 93
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.5706395089517855,
      "learning_rate": 9.975762879405018e-06,
      "loss": 0.1413,
      "step": 94
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.9503282618719768,
      "learning_rate": 9.974721419749466e-06,
      "loss": 0.1221,
      "step": 95
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.6897748435855646,
      "learning_rate": 9.973658109407403e-06,
      "loss": 0.105,
      "step": 96
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.1150461944750827,
      "learning_rate": 9.97257295304925e-06,
      "loss": 0.1422,
      "step": 97
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.149809103068844,
      "learning_rate": 9.971465955441386e-06,
      "loss": 0.0991,
      "step": 98
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.9774388476377238,
      "learning_rate": 9.970337121446127e-06,
      "loss": 0.1027,
      "step": 99
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.6539049989428836,
      "learning_rate": 9.9691864560217e-06,
      "loss": 0.1248,
      "step": 100
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.8618631406274415,
      "learning_rate": 9.968013964222223e-06,
      "loss": 0.0952,
      "step": 101
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.6700970501837484,
      "learning_rate": 9.96681965119768e-06,
      "loss": 0.1595,
      "step": 102
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.4203655017115824,
      "learning_rate": 9.96560352219391e-06,
      "loss": 0.1065,
      "step": 103
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.425921167211727,
      "learning_rate": 9.964365582552566e-06,
      "loss": 0.1272,
      "step": 104
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.1493403207347677,
      "learning_rate": 9.963105837711104e-06,
      "loss": 0.0693,
      "step": 105
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.9586184930220423,
      "learning_rate": 9.961824293202758e-06,
      "loss": 0.1232,
      "step": 106
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.016492233318853,
      "learning_rate": 9.960520954656512e-06,
      "loss": 0.0856,
      "step": 107
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.133900070676782,
      "learning_rate": 9.959195827797075e-06,
      "loss": 0.1285,
      "step": 108
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.013620495105404,
      "learning_rate": 9.957848918444861e-06,
      "loss": 0.1463,
      "step": 109
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.918642411411689,
      "learning_rate": 9.956480232515959e-06,
      "loss": 0.1226,
      "step": 110
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.743736443377982,
      "learning_rate": 9.955089776022108e-06,
      "loss": 0.1204,
      "step": 111
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.5410707973525848,
      "learning_rate": 9.953677555070671e-06,
      "loss": 0.0844,
      "step": 112
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.8403838218102002,
      "learning_rate": 9.952243575864608e-06,
      "loss": 0.1121,
      "step": 113
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.8857421494208948,
      "learning_rate": 9.950787844702447e-06,
      "loss": 0.1447,
      "step": 114
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.7670301270073683,
      "learning_rate": 9.949310367978262e-06,
      "loss": 0.1215,
      "step": 115
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.8156401555599075,
      "learning_rate": 9.947811152181637e-06,
      "loss": 0.1071,
      "step": 116
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.8639909815301174,
      "learning_rate": 9.946290203897643e-06,
      "loss": 0.1615,
      "step": 117
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.948669957321261,
      "learning_rate": 9.944747529806811e-06,
      "loss": 0.1239,
      "step": 118
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.093435371849378,
      "learning_rate": 9.943183136685092e-06,
      "loss": 0.1595,
      "step": 119
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.9885745564004753,
      "learning_rate": 9.94159703140384e-06,
      "loss": 0.172,
      "step": 120
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.0699282007190205,
      "learning_rate": 9.939989220929772e-06,
      "loss": 0.0966,
      "step": 121
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.9316680877282852,
      "learning_rate": 9.938359712324948e-06,
      "loss": 0.1261,
      "step": 122
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.9213513024255975,
      "learning_rate": 9.936708512746729e-06,
      "loss": 0.1385,
      "step": 123
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.0482109640638515,
      "learning_rate": 9.935035629447749e-06,
      "loss": 0.1446,
      "step": 124
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.7866384022513413,
      "learning_rate": 9.93334106977589e-06,
      "loss": 0.1218,
      "step": 125
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.5840977776437648,
      "learning_rate": 9.931624841174238e-06,
      "loss": 0.113,
      "step": 126
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.8804553233658048,
      "learning_rate": 9.929886951181059e-06,
      "loss": 0.124,
      "step": 127
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.2666728116858486,
      "learning_rate": 9.928127407429764e-06,
      "loss": 0.1888,
      "step": 128
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.5646682358647588,
      "learning_rate": 9.926346217648874e-06,
      "loss": 0.1171,
      "step": 129
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.4848420454582072,
      "learning_rate": 9.924543389661987e-06,
      "loss": 0.088,
      "step": 130
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.020458437632709,
      "learning_rate": 9.922718931387742e-06,
      "loss": 0.1485,
      "step": 131
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.8827184023295047,
      "learning_rate": 9.92087285083979e-06,
      "loss": 0.1168,
      "step": 132
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0266100825197584,
      "learning_rate": 9.919005156126746e-06,
      "loss": 0.146,
      "step": 133
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.235846705437004,
      "learning_rate": 9.917115855452172e-06,
      "loss": 0.1166,
      "step": 134
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.73904040901477,
      "learning_rate": 9.915204957114524e-06,
      "loss": 0.1521,
      "step": 135
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.1834714487743683,
      "learning_rate": 9.913272469507124e-06,
      "loss": 0.1304,
      "step": 136
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.8607754039829953,
      "learning_rate": 9.911318401118124e-06,
      "loss": 0.1205,
      "step": 137
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.7738065734608983,
      "learning_rate": 9.909342760530461e-06,
      "loss": 0.1318,
      "step": 138
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0090370977928287,
      "learning_rate": 9.90734555642183e-06,
      "loss": 0.1088,
      "step": 139
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.7717365204692492,
      "learning_rate": 9.905326797564637e-06,
      "loss": 0.1075,
      "step": 140
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.9880094654910834,
      "learning_rate": 9.903286492825965e-06,
      "loss": 0.1027,
      "step": 141
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.0750699770709584,
      "learning_rate": 9.901224651167534e-06,
      "loss": 0.1168,
      "step": 142
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.3732135065551923,
      "learning_rate": 9.899141281645662e-06,
      "loss": 0.1533,
      "step": 143
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.703545439116926,
      "learning_rate": 9.89703639341122e-06,
      "loss": 0.1117,
      "step": 144
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0281249810083595,
      "learning_rate": 9.894909995709607e-06,
      "loss": 0.1188,
      "step": 145
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.883392386333987,
      "learning_rate": 9.892762097880689e-06,
      "loss": 0.1169,
      "step": 146
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.8187730542306118,
      "learning_rate": 9.890592709358771e-06,
      "loss": 0.152,
      "step": 147
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.2339440137327795,
      "learning_rate": 9.888401839672554e-06,
      "loss": 0.1311,
      "step": 148
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.349926480407424,
      "learning_rate": 9.886189498445091e-06,
      "loss": 0.1474,
      "step": 149
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.5280358206435156,
      "learning_rate": 9.883955695393745e-06,
      "loss": 0.1089,
      "step": 150
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.55628950397484,
      "learning_rate": 9.881700440330148e-06,
      "loss": 0.1677,
      "step": 151
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.599168966333966,
      "learning_rate": 9.879423743160154e-06,
      "loss": 0.1218,
      "step": 152
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.065147882840751,
      "learning_rate": 9.877125613883799e-06,
      "loss": 0.1147,
      "step": 153
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.0973331798577304,
      "learning_rate": 9.87480606259526e-06,
      "loss": 0.1266,
      "step": 154
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.3614508711991764,
      "learning_rate": 9.872465099482798e-06,
      "loss": 0.1399,
      "step": 155
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.2839164920052997,
      "learning_rate": 9.870102734828733e-06,
      "loss": 0.1515,
      "step": 156
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.191700939824119,
      "learning_rate": 9.86771897900938e-06,
      "loss": 0.1186,
      "step": 157
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.7304607163865133,
      "learning_rate": 9.865313842495016e-06,
      "loss": 0.1165,
      "step": 158
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.988091314799846,
      "learning_rate": 9.862887335849825e-06,
      "loss": 0.1378,
      "step": 159
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.8491262715856396,
      "learning_rate": 9.86043946973186e-06,
      "loss": 0.1293,
      "step": 160
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.529334256156627,
      "learning_rate": 9.857970254892987e-06,
      "loss": 0.097,
      "step": 161
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.773404734705276,
      "learning_rate": 9.855479702178851e-06,
      "loss": 0.1168,
      "step": 162
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.4916119001719945,
      "learning_rate": 9.852967822528814e-06,
      "loss": 0.1687,
      "step": 163
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.5393415410713551,
      "learning_rate": 9.850434626975913e-06,
      "loss": 0.0885,
      "step": 164
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.9362575076807371,
      "learning_rate": 9.847880126646816e-06,
      "loss": 0.1465,
      "step": 165
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.5522038167181273,
      "learning_rate": 9.845304332761767e-06,
      "loss": 0.119,
      "step": 166
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.6333192172897075,
      "learning_rate": 9.842707256634534e-06,
      "loss": 0.1359,
      "step": 167
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.2486156763298677,
      "learning_rate": 9.840088909672373e-06,
      "loss": 0.1249,
      "step": 168
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.9948386846492634,
      "learning_rate": 9.83744930337596e-06,
      "loss": 0.1721,
      "step": 169
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.5051178680333455,
      "learning_rate": 9.834788449339359e-06,
      "loss": 0.1419,
      "step": 170
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.089529822824879,
      "learning_rate": 9.83210635924995e-06,
      "loss": 0.149,
      "step": 171
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.2638144924488817,
      "learning_rate": 9.829403044888393e-06,
      "loss": 0.1549,
      "step": 172
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.4470602369375063,
      "learning_rate": 9.82667851812858e-06,
      "loss": 0.1178,
      "step": 173
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.4740151917325277,
      "learning_rate": 9.823932790937565e-06,
      "loss": 0.118,
      "step": 174
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.7921124188059911,
      "learning_rate": 9.821165875375528e-06,
      "loss": 0.1317,
      "step": 175
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.7529591680637229,
      "learning_rate": 9.818377783595712e-06,
      "loss": 0.1209,
      "step": 176
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.680109016010429,
      "learning_rate": 9.815568527844375e-06,
      "loss": 0.1053,
      "step": 177
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.8681169915291045,
      "learning_rate": 9.812738120460732e-06,
      "loss": 0.1135,
      "step": 178
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.7484525794627954,
      "learning_rate": 9.809886573876908e-06,
      "loss": 0.1187,
      "step": 179
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.9799368911132444,
      "learning_rate": 9.807013900617874e-06,
      "loss": 0.1148,
      "step": 180
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.5209981726206028,
      "learning_rate": 9.8041201133014e-06,
      "loss": 0.1053,
      "step": 181
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.7761665804917133,
      "learning_rate": 9.801205224637993e-06,
      "loss": 0.1092,
      "step": 182
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.4864267110118337,
      "learning_rate": 9.798269247430847e-06,
      "loss": 0.1798,
      "step": 183
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.6997732391026787,
      "learning_rate": 9.79531219457578e-06,
      "loss": 0.1074,
      "step": 184
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.9235303414692586,
      "learning_rate": 9.792334079061192e-06,
      "loss": 0.1283,
      "step": 185
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.9637623355738383,
      "learning_rate": 9.789334913967982e-06,
      "loss": 0.1196,
      "step": 186
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.0004951230330947,
      "learning_rate": 9.786314712469519e-06,
      "loss": 0.1495,
      "step": 187
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.780329747363877,
      "learning_rate": 9.783273487831564e-06,
      "loss": 0.1545,
      "step": 188
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.0685841478506255,
      "learning_rate": 9.780211253412222e-06,
      "loss": 0.1282,
      "step": 189
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.2671215265269633,
      "learning_rate": 9.777128022661877e-06,
      "loss": 0.1748,
      "step": 190
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.3902891616790383,
      "learning_rate": 9.774023809123142e-06,
      "loss": 0.1474,
      "step": 191
    },
    {
      "epoch": 0.12,
      "grad_norm": 4.428055584599217,
      "learning_rate": 9.770898626430786e-06,
      "loss": 0.0995,
      "step": 192
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.5921445659631617,
      "learning_rate": 9.76775248831169e-06,
      "loss": 0.1075,
      "step": 193
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.8876348934658589,
      "learning_rate": 9.764585408584772e-06,
      "loss": 0.1591,
      "step": 194
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.4909969670884806,
      "learning_rate": 9.761397401160934e-06,
      "loss": 0.0882,
      "step": 195
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.6466444261010116,
      "learning_rate": 9.758188480043e-06,
      "loss": 0.1399,
      "step": 196
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.2507322870188777,
      "learning_rate": 9.754958659325656e-06,
      "loss": 0.151,
      "step": 197
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.9965502835375872,
      "learning_rate": 9.751707953195386e-06,
      "loss": 0.1243,
      "step": 198
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.967103185033387,
      "learning_rate": 9.748436375930406e-06,
      "loss": 0.1103,
      "step": 199
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.9032906864214325,
      "learning_rate": 9.745143941900607e-06,
      "loss": 0.1245,
      "step": 200
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.82973826843128,
      "learning_rate": 9.741830665567498e-06,
      "loss": 0.1209,
      "step": 201
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.1136392039623297,
      "learning_rate": 9.738496561484118e-06,
      "loss": 0.1063,
      "step": 202
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.656781829823401,
      "learning_rate": 9.735141644295006e-06,
      "loss": 0.1834,
      "step": 203
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.2732162880867715,
      "learning_rate": 9.731765928736107e-06,
      "loss": 0.1175,
      "step": 204
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.9163592371029838,
      "learning_rate": 9.728369429634728e-06,
      "loss": 0.1308,
      "step": 205
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.9926464099145167,
      "learning_rate": 9.724952161909456e-06,
      "loss": 0.1055,
      "step": 206
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.212171729804821,
      "learning_rate": 9.721514140570108e-06,
      "loss": 0.1205,
      "step": 207
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.089447112787558,
      "learning_rate": 9.718055380717655e-06,
      "loss": 0.1257,
      "step": 208
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.8443109675212546,
      "learning_rate": 9.714575897544161e-06,
      "loss": 0.1243,
      "step": 209
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.8644539626190368,
      "learning_rate": 9.71107570633271e-06,
      "loss": 0.1027,
      "step": 210
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.9358246309330898,
      "learning_rate": 9.707554822457346e-06,
      "loss": 0.1144,
      "step": 211
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6560610615371094,
      "learning_rate": 9.704013261382998e-06,
      "loss": 0.1304,
      "step": 212
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.5270084771765515,
      "learning_rate": 9.700451038665427e-06,
      "loss": 0.0894,
      "step": 213
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.5406483878286648,
      "learning_rate": 9.696868169951133e-06,
      "loss": 0.1333,
      "step": 214
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.9375740465908717,
      "learning_rate": 9.693264670977307e-06,
      "loss": 0.1245,
      "step": 215
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.0904332183871945,
      "learning_rate": 9.68964055757176e-06,
      "loss": 0.1355,
      "step": 216
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.233484010834808,
      "learning_rate": 9.68599584565284e-06,
      "loss": 0.1303,
      "step": 217
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.7671033554601976,
      "learning_rate": 9.682330551229378e-06,
      "loss": 0.1539,
      "step": 218
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.5736703608691354,
      "learning_rate": 9.678644690400602e-06,
      "loss": 0.1312,
      "step": 219
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.788108504780175,
      "learning_rate": 9.674938279356086e-06,
      "loss": 0.1256,
      "step": 220
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.4017354768821373,
      "learning_rate": 9.671211334375655e-06,
      "loss": 0.1195,
      "step": 221
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.554406170607817,
      "learning_rate": 9.667463871829336e-06,
      "loss": 0.1152,
      "step": 222
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.9776198759979988,
      "learning_rate": 9.663695908177272e-06,
      "loss": 0.1232,
      "step": 223
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.8763841196803877,
      "learning_rate": 9.659907459969656e-06,
      "loss": 0.1162,
      "step": 224
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.603879538170472,
      "learning_rate": 9.656098543846652e-06,
      "loss": 0.0987,
      "step": 225
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.5957011663071399,
      "learning_rate": 9.652269176538332e-06,
      "loss": 0.0967,
      "step": 226
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.5595559852063516,
      "learning_rate": 9.648419374864591e-06,
      "loss": 0.1131,
      "step": 227
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.6225818778400385,
      "learning_rate": 9.644549155735081e-06,
      "loss": 0.1079,
      "step": 228
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4391874862387903,
      "learning_rate": 9.640658536149137e-06,
      "loss": 0.111,
      "step": 229
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.9059567028458015,
      "learning_rate": 9.636747533195698e-06,
      "loss": 0.0989,
      "step": 230
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.9499035596099419,
      "learning_rate": 9.632816164053232e-06,
      "loss": 0.1746,
      "step": 231
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.7566067913290406,
      "learning_rate": 9.62886444598966e-06,
      "loss": 0.1225,
      "step": 232
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4625347527200148,
      "learning_rate": 9.624892396362293e-06,
      "loss": 0.1066,
      "step": 233
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.9096185245160564,
      "learning_rate": 9.620900032617734e-06,
      "loss": 0.1614,
      "step": 234
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.6414618208334602,
      "learning_rate": 9.61688737229182e-06,
      "loss": 0.1319,
      "step": 235
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.01434795372801,
      "learning_rate": 9.612854433009531e-06,
      "loss": 0.0905,
      "step": 236
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.8677307978193975,
      "learning_rate": 9.608801232484923e-06,
      "loss": 0.1145,
      "step": 237
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.6100118695054884,
      "learning_rate": 9.604727788521048e-06,
      "loss": 0.1252,
      "step": 238
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.029485209887423,
      "learning_rate": 9.600634119009873e-06,
      "loss": 0.1591,
      "step": 239
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.6830484776374746,
      "learning_rate": 9.596520241932198e-06,
      "loss": 0.0982,
      "step": 240
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.8585528147339136,
      "learning_rate": 9.592386175357589e-06,
      "loss": 0.1297,
      "step": 241
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.7993240085387165,
      "learning_rate": 9.588231937444284e-06,
      "loss": 0.1164,
      "step": 242
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.6977256582020621,
      "learning_rate": 9.584057546439126e-06,
      "loss": 0.1073,
      "step": 243
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.6761942593097194,
      "learning_rate": 9.579863020677475e-06,
      "loss": 0.1184,
      "step": 244
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.8129631892657423,
      "learning_rate": 9.575648378583129e-06,
      "loss": 0.1019,
      "step": 245
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.0313290754950706,
      "learning_rate": 9.571413638668246e-06,
      "loss": 0.121,
      "step": 246
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.7374763184195408,
      "learning_rate": 9.56715881953326e-06,
      "loss": 0.1001,
      "step": 247
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.6691847184224187,
      "learning_rate": 9.562883939866797e-06,
      "loss": 0.1373,
      "step": 248
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.755427079402434,
      "learning_rate": 9.5585890184456e-06,
      "loss": 0.1264,
      "step": 249
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.004942074380999,
      "learning_rate": 9.55427407413444e-06,
      "loss": 0.1415,
      "step": 250
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.662759577069425,
      "learning_rate": 9.549939125886033e-06,
      "loss": 0.1086,
      "step": 251
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.312626616306414,
      "learning_rate": 9.545584192740965e-06,
      "loss": 0.1526,
      "step": 252
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.3728980521640644,
      "learning_rate": 9.541209293827599e-06,
      "loss": 0.1017,
      "step": 253
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.164712163219188,
      "learning_rate": 9.536814448361993e-06,
      "loss": 0.1429,
      "step": 254
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.3341674550010627,
      "learning_rate": 9.53239967564782e-06,
      "loss": 0.0793,
      "step": 255
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.9368004727513668,
      "learning_rate": 9.527964995076279e-06,
      "loss": 0.1179,
      "step": 256
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.3974500112400439,
      "learning_rate": 9.523510426126015e-06,
      "loss": 0.1052,
      "step": 257
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.4225137071802845,
      "learning_rate": 9.519035988363021e-06,
      "loss": 0.1932,
      "step": 258
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.9997143412977902,
      "learning_rate": 9.514541701440568e-06,
      "loss": 0.1219,
      "step": 259
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.02847423848522,
      "learning_rate": 9.510027585099107e-06,
      "loss": 0.0878,
      "step": 260
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.89227289355829,
      "learning_rate": 9.505493659166193e-06,
      "loss": 0.123,
      "step": 261
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.9895504729547155,
      "learning_rate": 9.500939943556383e-06,
      "loss": 0.1601,
      "step": 262
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.090635912690094,
      "learning_rate": 9.496366458271165e-06,
      "loss": 0.1193,
      "step": 263
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.4116455872624623,
      "learning_rate": 9.491773223398855e-06,
      "loss": 0.1142,
      "step": 264
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.1217316271010223,
      "learning_rate": 9.487160259114521e-06,
      "loss": 0.1351,
      "step": 265
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.9242039435529,
      "learning_rate": 9.482527585679886e-06,
      "loss": 0.119,
      "step": 266
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.315115479904517,
      "learning_rate": 9.477875223443249e-06,
      "loss": 0.1284,
      "step": 267
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.2719860315913696,
      "learning_rate": 9.473203192839379e-06,
      "loss": 0.1721,
      "step": 268
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.5947127743255973,
      "learning_rate": 9.468511514389442e-06,
      "loss": 0.1036,
      "step": 269
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.9584745483264658,
      "learning_rate": 9.463800208700904e-06,
      "loss": 0.1192,
      "step": 270
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.7447628859050024,
      "learning_rate": 9.45906929646744e-06,
      "loss": 0.1161,
      "step": 271
    },
    {
      "epoch": 0.18,
      "grad_norm": 14.200458714414616,
      "learning_rate": 9.454318798468838e-06,
      "loss": 0.0977,
      "step": 272
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.213702512044038,
      "learning_rate": 9.449548735570922e-06,
      "loss": 0.129,
      "step": 273
    },
    {
      "epoch": 0.18,
      "grad_norm": 3.3993828599611606,
      "learning_rate": 9.444759128725446e-06,
      "loss": 0.1118,
      "step": 274
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.6488629327673772,
      "learning_rate": 9.439949998970012e-06,
      "loss": 0.1289,
      "step": 275
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.8771789314272584,
      "learning_rate": 9.435121367427969e-06,
      "loss": 0.1407,
      "step": 276
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.0141415937656255,
      "learning_rate": 9.430273255308329e-06,
      "loss": 0.098,
      "step": 277
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.6490964523017495,
      "learning_rate": 9.425405683905664e-06,
      "loss": 0.1119,
      "step": 278
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.949423514590385,
      "learning_rate": 9.420518674600019e-06,
      "loss": 0.0949,
      "step": 279
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.301200040078636,
      "learning_rate": 9.415612248856824e-06,
      "loss": 0.1356,
      "step": 280
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.4438448938217925,
      "learning_rate": 9.410686428226784e-06,
      "loss": 0.1168,
      "step": 281
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.8595023110908409,
      "learning_rate": 9.4057412343458e-06,
      "loss": 0.1565,
      "step": 282
    },
    {
      "epoch": 0.18,
      "grad_norm": 14.015898381538342,
      "learning_rate": 9.40077668893486e-06,
      "loss": 0.1257,
      "step": 283
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.7641459666937271,
      "learning_rate": 9.395792813799954e-06,
      "loss": 0.1369,
      "step": 284
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.35003300843501,
      "learning_rate": 9.390789630831975e-06,
      "loss": 0.1258,
      "step": 285
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.9587992320280527,
      "learning_rate": 9.385767162006626e-06,
      "loss": 0.1198,
      "step": 286
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.3927013768818504,
      "learning_rate": 9.380725429384311e-06,
      "loss": 0.1038,
      "step": 287
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.9346564539370021,
      "learning_rate": 9.375664455110056e-06,
      "loss": 0.1057,
      "step": 288
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.969752960731544,
      "learning_rate": 9.3705842614134e-06,
      "loss": 0.1698,
      "step": 289
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.7366125913154666,
      "learning_rate": 9.365484870608298e-06,
      "loss": 0.1228,
      "step": 290
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.0991611774972725,
      "learning_rate": 9.360366305093029e-06,
      "loss": 0.0868,
      "step": 291
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.2938664385866487,
      "learning_rate": 9.355228587350091e-06,
      "loss": 0.1236,
      "step": 292
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.9417985269231075,
      "learning_rate": 9.350071739946107e-06,
      "loss": 0.1094,
      "step": 293
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.8408239731332179,
      "learning_rate": 9.344895785531725e-06,
      "loss": 0.1349,
      "step": 294
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.6860663646872776,
      "learning_rate": 9.339700746841514e-06,
      "loss": 0.1159,
      "step": 295
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.7887989384287633,
      "learning_rate": 9.334486646693868e-06,
      "loss": 0.1307,
      "step": 296
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.071047508689909,
      "learning_rate": 9.329253507990912e-06,
      "loss": 0.1382,
      "step": 297
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.9934009243327555,
      "learning_rate": 9.324001353718386e-06,
      "loss": 0.1131,
      "step": 298
    },
    {
      "epoch": 0.19,
      "grad_norm": 4.293883374927684,
      "learning_rate": 9.318730206945558e-06,
      "loss": 0.1875,
      "step": 299
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.186485525987884,
      "learning_rate": 9.31344009082512e-06,
      "loss": 0.1332,
      "step": 300
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.7888241804719054,
      "learning_rate": 9.308131028593074e-06,
      "loss": 0.0941,
      "step": 301
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.4634800844949494,
      "learning_rate": 9.30280304356865e-06,
      "loss": 0.1507,
      "step": 302
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.0519592864111837,
      "learning_rate": 9.29745615915419e-06,
      "loss": 0.1696,
      "step": 303
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.16519408228547,
      "learning_rate": 9.292090398835048e-06,
      "loss": 0.1282,
      "step": 304
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.128764797470375,
      "learning_rate": 9.286705786179489e-06,
      "loss": 0.1631,
      "step": 305
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.30218954081609,
      "learning_rate": 9.281302344838579e-06,
      "loss": 0.1316,
      "step": 306
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.9155682605173827,
      "learning_rate": 9.275880098546092e-06,
      "loss": 0.1542,
      "step": 307
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.5614986150201413,
      "learning_rate": 9.270439071118398e-06,
      "loss": 0.1614,
      "step": 308
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.8525768538837735,
      "learning_rate": 9.264979286454358e-06,
      "loss": 0.1191,
      "step": 309
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.898080870573492,
      "learning_rate": 9.259500768535226e-06,
      "loss": 0.1181,
      "step": 310
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.472459360187466,
      "learning_rate": 9.254003541424534e-06,
      "loss": 0.1516,
      "step": 311
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.305568026320411,
      "learning_rate": 9.248487629267994e-06,
      "loss": 0.1615,
      "step": 312
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.6544217551014015,
      "learning_rate": 9.242953056293387e-06,
      "loss": 0.1054,
      "step": 313
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.8127935644971191,
      "learning_rate": 9.23739984681046e-06,
      "loss": 0.1441,
      "step": 314
    },
    {
      "epoch": 0.2,
      "grad_norm": 3.9498604123511916,
      "learning_rate": 9.231828025210821e-06,
      "loss": 0.0988,
      "step": 315
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.9854409225946612,
      "learning_rate": 9.226237615967822e-06,
      "loss": 0.1331,
      "step": 316
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.6409608855962516,
      "learning_rate": 9.220628643636462e-06,
      "loss": 0.0962,
      "step": 317
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.382784821567455,
      "learning_rate": 9.215001132853277e-06,
      "loss": 0.1395,
      "step": 318
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.967955661382828,
      "learning_rate": 9.20935510833623e-06,
      "loss": 0.1418,
      "step": 319
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.9420469716155404,
      "learning_rate": 9.2036905948846e-06,
      "loss": 0.1504,
      "step": 320
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.002899106478933,
      "learning_rate": 9.198007617378876e-06,
      "loss": 0.155,
      "step": 321
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.978520034374933,
      "learning_rate": 9.192306200780652e-06,
      "loss": 0.1147,
      "step": 322
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.861974983143129,
      "learning_rate": 9.186586370132508e-06,
      "loss": 0.1141,
      "step": 323
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.7627717021521454,
      "learning_rate": 9.180848150557906e-06,
      "loss": 0.1287,
      "step": 324
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.612676555695171,
      "learning_rate": 9.175091567261078e-06,
      "loss": 0.1125,
      "step": 325
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.643370148198158,
      "learning_rate": 9.169316645526919e-06,
      "loss": 0.1505,
      "step": 326
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.5430892090106256,
      "learning_rate": 9.163523410720866e-06,
      "loss": 0.1229,
      "step": 327
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.696558808661286,
      "learning_rate": 9.157711888288802e-06,
      "loss": 0.0949,
      "step": 328
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.949950682230708,
      "learning_rate": 9.15188210375693e-06,
      "loss": 0.1075,
      "step": 329
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.281345406537966,
      "learning_rate": 9.146034082731668e-06,
      "loss": 0.1504,
      "step": 330
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.786416816817829,
      "learning_rate": 9.140167850899533e-06,
      "loss": 0.0944,
      "step": 331
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.1707974071960687,
      "learning_rate": 9.134283434027033e-06,
      "loss": 0.1559,
      "step": 332
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.630770248631941,
      "learning_rate": 9.12838085796055e-06,
      "loss": 0.1168,
      "step": 333
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.0322405623333113,
      "learning_rate": 9.122460148626227e-06,
      "loss": 0.1527,
      "step": 334
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.7322055643416592,
      "learning_rate": 9.116521332029852e-06,
      "loss": 0.1367,
      "step": 335
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.8373532226134297,
      "learning_rate": 9.110564434256752e-06,
      "loss": 0.1105,
      "step": 336
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.07554187753308,
      "learning_rate": 9.104589481471668e-06,
      "loss": 0.1334,
      "step": 337
    },
    {
      "epoch": 0.22,
      "grad_norm": 3.2236998943249664,
      "learning_rate": 9.098596499918648e-06,
      "loss": 0.125,
      "step": 338
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.597186712884655,
      "learning_rate": 9.092585515920926e-06,
      "loss": 0.1293,
      "step": 339
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.7174798604288135,
      "learning_rate": 9.08655655588081e-06,
      "loss": 0.1263,
      "step": 340
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.2256543091224077,
      "learning_rate": 9.080509646279564e-06,
      "loss": 0.1771,
      "step": 341
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.4758883533703013,
      "learning_rate": 9.074444813677297e-06,
      "loss": 0.1727,
      "step": 342
    },
    {
      "epoch": 0.22,
      "grad_norm": 4.783793767943786,
      "learning_rate": 9.068362084712835e-06,
      "loss": 0.1341,
      "step": 343
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.9378176129085365,
      "learning_rate": 9.062261486103614e-06,
      "loss": 0.1598,
      "step": 344
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.7783584831433117,
      "learning_rate": 9.056143044645564e-06,
      "loss": 0.1352,
      "step": 345
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.6546196413315133,
      "learning_rate": 9.05000678721298e-06,
      "loss": 0.1232,
      "step": 346
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.118380902242969,
      "learning_rate": 9.043852740758416e-06,
      "loss": 0.1549,
      "step": 347
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.6087772904564852,
      "learning_rate": 9.037680932312557e-06,
      "loss": 0.0881,
      "step": 348
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.7254126145695363,
      "learning_rate": 9.031491388984108e-06,
      "loss": 0.115,
      "step": 349
    },
    {
      "epoch": 0.23,
      "grad_norm": 3.981321894036563,
      "learning_rate": 9.025284137959674e-06,
      "loss": 0.1445,
      "step": 350
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.075752065166549,
      "learning_rate": 9.019059206503632e-06,
      "loss": 0.1353,
      "step": 351
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.7852721093628698,
      "learning_rate": 9.012816621958018e-06,
      "loss": 0.1359,
      "step": 352
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.400157487370791,
      "learning_rate": 9.006556411742415e-06,
      "loss": 0.1049,
      "step": 353
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.51114251446299,
      "learning_rate": 9.000278603353817e-06,
      "loss": 0.0974,
      "step": 354
    },
    {
      "epoch": 0.23,
      "grad_norm": 4.554006360375019,
      "learning_rate": 8.993983224366514e-06,
      "loss": 0.1627,
      "step": 355
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.292470957192174,
      "learning_rate": 8.987670302431977e-06,
      "loss": 0.1205,
      "step": 356
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.2727328929301023,
      "learning_rate": 8.981339865278726e-06,
      "loss": 0.1248,
      "step": 357
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.79429792897825,
      "learning_rate": 8.97499194071222e-06,
      "loss": 0.0865,
      "step": 358
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.534656293841016,
      "learning_rate": 8.968626556614723e-06,
      "loss": 0.1852,
      "step": 359
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.451261116593459,
      "learning_rate": 8.962243740945194e-06,
      "loss": 0.1735,
      "step": 360
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.6099662771095622,
      "learning_rate": 8.955843521739152e-06,
      "loss": 0.0949,
      "step": 361
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.952880241478913,
      "learning_rate": 8.949425927108557e-06,
      "loss": 0.1642,
      "step": 362
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.1001999836849707,
      "learning_rate": 8.942990985241694e-06,
      "loss": 0.1966,
      "step": 363
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.7791224591481396,
      "learning_rate": 8.936538724403036e-06,
      "loss": 0.1094,
      "step": 364
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.7248924617963337,
      "learning_rate": 8.930069172933133e-06,
      "loss": 0.1511,
      "step": 365
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.0501675667141672,
      "learning_rate": 8.923582359248482e-06,
      "loss": 0.1612,
      "step": 366
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.3735141875296217,
      "learning_rate": 8.917078311841395e-06,
      "loss": 0.1712,
      "step": 367
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.0407091995824618,
      "learning_rate": 8.91055705927988e-06,
      "loss": 0.1648,
      "step": 368
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.6480885359011928,
      "learning_rate": 8.904018630207526e-06,
      "loss": 0.1097,
      "step": 369
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.082826171786718,
      "learning_rate": 8.897463053343363e-06,
      "loss": 0.115,
      "step": 370
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.0778529231041247,
      "learning_rate": 8.89089035748173e-06,
      "loss": 0.1412,
      "step": 371
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.8870564121587825,
      "learning_rate": 8.884300571492173e-06,
| "loss": 0.1215, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 1.6728688014206319, | |
| "learning_rate": 8.877693724319294e-06, | |
| "loss": 0.1369, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 1.5071568023441673, | |
| "learning_rate": 8.871069844982639e-06, | |
| "loss": 0.1422, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 1.8405992455653366, | |
| "learning_rate": 8.864428962576558e-06, | |
| "loss": 0.1413, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 2.04100654612223, | |
| "learning_rate": 8.8577711062701e-06, | |
| "loss": 0.159, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 1.7258745698933753, | |
| "learning_rate": 8.851096305306846e-06, | |
| "loss": 0.0891, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 1.4535262921515013, | |
| "learning_rate": 8.844404589004825e-06, | |
| "loss": 0.0743, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.983927878208087, | |
| "learning_rate": 8.837695986756351e-06, | |
| "loss": 0.1091, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 2.5547966658918195, | |
| "learning_rate": 8.830970528027912e-06, | |
| "loss": 0.1509, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 2.177776323767593, | |
| "learning_rate": 8.824228242360035e-06, | |
| "loss": 0.0905, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.8634921804601332, | |
| "learning_rate": 8.817469159367159e-06, | |
| "loss": 0.1439, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.8253919296872063, | |
| "learning_rate": 8.810693308737493e-06, | |
| "loss": 0.1371, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.8264289794840303, | |
| "learning_rate": 8.803900720232908e-06, | |
| "loss": 0.1017, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.7598193012149368, | |
| "learning_rate": 8.797091423688787e-06, | |
| "loss": 0.104, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 4.6146507935852075, | |
| "learning_rate": 8.790265449013899e-06, | |
| "loss": 0.169, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 2.138258612818048, | |
| "learning_rate": 8.783422826190272e-06, | |
| "loss": 0.141, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.777360621027715, | |
| "learning_rate": 8.776563585273057e-06, | |
| "loss": 0.1147, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 2.0918180041637924, | |
| "learning_rate": 8.769687756390401e-06, | |
| "loss": 0.1118, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 2.0832589535976274, | |
| "learning_rate": 8.762795369743303e-06, | |
| "loss": 0.1144, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.7581039464137596, | |
| "learning_rate": 8.755886455605499e-06, | |
| "loss": 0.1251, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.4128017025241764, | |
| "learning_rate": 8.748961044323312e-06, | |
| "loss": 0.118, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.9739614747699874, | |
| "learning_rate": 8.74201916631553e-06, | |
| "loss": 0.124, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 1.823956299069119, | |
| "learning_rate": 8.735060852073267e-06, | |
| "loss": 0.1307, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 1.8653973387035925, | |
| "learning_rate": 8.72808613215983e-06, | |
| "loss": 0.1167, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 2.043078073672084, | |
| "learning_rate": 8.72109503721059e-06, | |
| "loss": 0.1276, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 2.3993371483045447, | |
| "learning_rate": 8.714087597932837e-06, | |
| "loss": 0.0887, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 2.155072263044344, | |
| "learning_rate": 8.70706384510565e-06, | |
| "loss": 0.1027, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 1.863716590798003, | |
| "learning_rate": 8.700023809579772e-06, | |
| "loss": 0.1426, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 2.4148553401960213, | |
| "learning_rate": 8.692967522277453e-06, | |
| "loss": 0.1831, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 3.2182007745276002, | |
| "learning_rate": 8.685895014192336e-06, | |
| "loss": 0.1189, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 1.7745164161829752, | |
| "learning_rate": 8.678806316389308e-06, | |
| "loss": 0.1264, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 1.9954907613170605, | |
| "learning_rate": 8.671701460004362e-06, | |
| "loss": 0.1119, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 2.443494772653162, | |
| "learning_rate": 8.664580476244476e-06, | |
| "loss": 0.1201, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 3.6521852062845737, | |
| "learning_rate": 8.657443396387456e-06, | |
| "loss": 0.1533, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 1.7783525006636822, | |
| "learning_rate": 8.650290251781806e-06, | |
| "loss": 0.136, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 1.8404598865947792, | |
| "learning_rate": 8.6431210738466e-06, | |
| "loss": 0.1032, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 1.993842867577624, | |
| "learning_rate": 8.635935894071332e-06, | |
| "loss": 0.1504, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 2.23506410525159, | |
| "learning_rate": 8.628734744015781e-06, | |
| "loss": 0.1362, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.5101646789391827, | |
| "learning_rate": 8.621517655309872e-06, | |
| "loss": 0.0944, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 2.1222112692022446, | |
| "learning_rate": 8.61428465965354e-06, | |
| "loss": 0.1106, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.589971462883146, | |
| "learning_rate": 8.60703578881659e-06, | |
| "loss": 0.1078, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.8152499697587898, | |
| "learning_rate": 8.599771074638552e-06, | |
| "loss": 0.1465, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.7515501086572345, | |
| "learning_rate": 8.592490549028549e-06, | |
| "loss": 0.1133, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 2.074545833246127, | |
| "learning_rate": 8.585194243965154e-06, | |
| "loss": 0.1131, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 2.6840561699332905, | |
| "learning_rate": 8.577882191496244e-06, | |
| "loss": 0.1251, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.8878023794880057, | |
| "learning_rate": 8.570554423738865e-06, | |
| "loss": 0.1415, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 2.3218353195989114, | |
| "learning_rate": 8.563210972879099e-06, | |
| "loss": 0.1421, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.809764155258485, | |
| "learning_rate": 8.5558518711719e-06, | |
| "loss": 0.0924, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 2.025310033583316, | |
| "learning_rate": 8.548477150940976e-06, | |
| "loss": 0.1101, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.495565142634987, | |
| "learning_rate": 8.541086844578632e-06, | |
| "loss": 0.1063, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 2.00374794205199, | |
| "learning_rate": 8.533680984545632e-06, | |
| "loss": 0.1318, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 2.698220498105753, | |
| "learning_rate": 8.526259603371063e-06, | |
| "loss": 0.1404, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 2.0282444847472982, | |
| "learning_rate": 8.518822733652179e-06, | |
| "loss": 0.1609, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.4428540102326999, | |
| "learning_rate": 8.511370408054269e-06, | |
| "loss": 0.1111, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.823326472349433, | |
| "learning_rate": 8.503902659310511e-06, | |
| "loss": 0.1303, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.975642411609206, | |
| "learning_rate": 8.49641952022182e-06, | |
| "loss": 0.1319, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 2.0984423008283546, | |
| "learning_rate": 8.488921023656717e-06, | |
| "loss": 0.1255, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 3.6327681226804796, | |
| "learning_rate": 8.481407202551179e-06, | |
| "loss": 0.1183, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.7233323660269233, | |
| "learning_rate": 8.47387808990849e-06, | |
| "loss": 0.1385, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 2.4460372159761063, | |
| "learning_rate": 8.466333718799097e-06, | |
| "loss": 0.1389, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 3.694191543079506, | |
| "learning_rate": 8.458774122360479e-06, | |
| "loss": 0.1357, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.541691399674599, | |
| "learning_rate": 8.451199333796974e-06, | |
| "loss": 0.0873, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 3.5747686527537694, | |
| "learning_rate": 8.44360938637966e-06, | |
| "loss": 0.1408, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.9808406911801135, | |
| "learning_rate": 8.436004313446198e-06, | |
| "loss": 0.1742, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.8562173966255497, | |
| "learning_rate": 8.428384148400679e-06, | |
| "loss": 0.1224, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.8682189381449035, | |
| "learning_rate": 8.420748924713489e-06, | |
| "loss": 0.1185, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 2.076463285377218, | |
| "learning_rate": 8.413098675921154e-06, | |
| "loss": 0.1169, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.5595921907762054, | |
| "learning_rate": 8.405433435626198e-06, | |
| "loss": 0.0771, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 1.6400626849748228, | |
| "learning_rate": 8.397753237496989e-06, | |
| "loss": 0.1257, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 1.9595113341341717, | |
| "learning_rate": 8.390058115267599e-06, | |
| "loss": 0.1308, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 1.9753446481614831, | |
| "learning_rate": 8.38234810273765e-06, | |
| "loss": 0.1197, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 2.088782693449183, | |
| "learning_rate": 8.374623233772166e-06, | |
| "loss": 0.1393, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 1.77933848636558, | |
| "learning_rate": 8.366883542301428e-06, | |
| "loss": 0.1237, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 2.3432881788234496, | |
| "learning_rate": 8.35912906232082e-06, | |
| "loss": 0.1008, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 1.542175556656122, | |
| "learning_rate": 8.351359827890686e-06, | |
| "loss": 0.1102, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 2.0198766647420334, | |
| "learning_rate": 8.343575873136174e-06, | |
| "loss": 0.115, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 1.9031797249550388, | |
| "learning_rate": 8.335777232247086e-06, | |
| "loss": 0.1523, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 2.179797475684397, | |
| "learning_rate": 8.327963939477736e-06, | |
| "loss": 0.1241, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 2.312455490674629, | |
| "learning_rate": 8.320136029146792e-06, | |
| "loss": 0.1833, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 2.260180433787793, | |
| "learning_rate": 8.312293535637123e-06, | |
| "loss": 0.1546, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 3.6785202678147257, | |
| "learning_rate": 8.304436493395663e-06, | |
| "loss": 0.1455, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 1.9858223819669532, | |
| "learning_rate": 8.296564936933237e-06, | |
| "loss": 0.1434, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 1.9102543474762492, | |
| "learning_rate": 8.28867890082443e-06, | |
| "loss": 0.1312, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 2.4219016246969467, | |
| "learning_rate": 8.280778419707421e-06, | |
| "loss": 0.1097, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 1.4863931985205288, | |
| "learning_rate": 8.27286352828384e-06, | |
| "loss": 0.1032, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.4262744449759, | |
| "learning_rate": 8.26493426131861e-06, | |
| "loss": 0.0883, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.8849980135344147, | |
| "learning_rate": 8.256990653639798e-06, | |
| "loss": 0.1465, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.90921490609553, | |
| "learning_rate": 8.24903274013846e-06, | |
| "loss": 0.1027, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.9345635072068623, | |
| "learning_rate": 8.241060555768487e-06, | |
| "loss": 0.1706, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.0906273304065, | |
| "learning_rate": 8.23307413554645e-06, | |
| "loss": 0.1054, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.7010263154623813, | |
| "learning_rate": 8.225073514551458e-06, | |
| "loss": 0.117, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.856999714971948, | |
| "learning_rate": 8.217058727924985e-06, | |
| "loss": 0.099, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.096162508384875, | |
| "learning_rate": 8.20902981087073e-06, | |
| "loss": 0.0992, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.1897409229932543, | |
| "learning_rate": 8.200986798654454e-06, | |
| "loss": 0.1353, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.0107497205490326, | |
| "learning_rate": 8.192929726603834e-06, | |
| "loss": 0.12, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.6764154097639425, | |
| "learning_rate": 8.184858630108301e-06, | |
| "loss": 0.0961, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 2.5132331514800432, | |
| "learning_rate": 8.176773544618884e-06, | |
| "loss": 0.1417, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.6290482258431218, | |
| "learning_rate": 8.168674505648055e-06, | |
| "loss": 0.0847, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.6373641890196442, | |
| "learning_rate": 8.16056154876958e-06, | |
| "loss": 0.0994, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.845726502290335, | |
| "learning_rate": 8.152434709618355e-06, | |
| "loss": 0.0841, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.155909263655658, | |
| "learning_rate": 8.144294023890249e-06, | |
| "loss": 0.1398, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 6.212072051455184, | |
| "learning_rate": 8.136139527341954e-06, | |
| "loss": 0.1668, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 3.0126155869943188, | |
| "learning_rate": 8.12797125579082e-06, | |
| "loss": 0.1522, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.4551386753433713, | |
| "learning_rate": 8.119789245114704e-06, | |
| "loss": 0.112, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.8418803359771359, | |
| "learning_rate": 8.111593531251812e-06, | |
| "loss": 0.1646, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.8692838890636365, | |
| "learning_rate": 8.103384150200535e-06, | |
| "loss": 0.1035, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.9377035472958686, | |
| "learning_rate": 8.095161138019294e-06, | |
| "loss": 0.0809, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 6.587432958669322, | |
| "learning_rate": 8.086924530826386e-06, | |
| "loss": 0.1454, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.02896456008906, | |
| "learning_rate": 8.078674364799823e-06, | |
| "loss": 0.1085, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.7655639368460225, | |
| "learning_rate": 8.070410676177171e-06, | |
| "loss": 0.1396, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.1284960143825113, | |
| "learning_rate": 8.062133501255388e-06, | |
| "loss": 0.148, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.0034337679566354, | |
| "learning_rate": 8.053842876390673e-06, | |
| "loss": 0.0812, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.85902216889917, | |
| "learning_rate": 8.045538837998299e-06, | |
| "loss": 0.1333, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 1.7602908592648836, | |
| "learning_rate": 8.03722142255246e-06, | |
| "loss": 0.153, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 2.3218830655644367, | |
| "learning_rate": 8.0288906665861e-06, | |
| "loss": 0.1066, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.1584017599108836, | |
| "learning_rate": 8.020546606690767e-06, | |
| "loss": 0.1519, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.7606072803807844, | |
| "learning_rate": 8.012189279516435e-06, | |
| "loss": 0.1137, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.888831011490011, | |
| "learning_rate": 8.003818721771364e-06, | |
| "loss": 0.1037, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 4.170836406338443, | |
| "learning_rate": 7.995434970221915e-06, | |
| "loss": 0.1317, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.7104103028650062, | |
| "learning_rate": 7.987038061692412e-06, | |
| "loss": 0.1219, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.995512990844272, | |
| "learning_rate": 7.97862803306496e-06, | |
| "loss": 0.137, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.8348429648687326, | |
| "learning_rate": 7.970204921279296e-06, | |
| "loss": 0.1202, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.9520817239757213, | |
| "learning_rate": 7.961768763332624e-06, | |
| "loss": 0.1318, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.8676185821725126, | |
| "learning_rate": 7.953319596279447e-06, | |
| "loss": 0.1976, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.6485722163392649, | |
| "learning_rate": 7.944857457231415e-06, | |
| "loss": 0.0991, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.8645910595696418, | |
| "learning_rate": 7.936382383357149e-06, | |
| "loss": 0.1366, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.0059127758512467, | |
| "learning_rate": 7.927894411882086e-06, | |
| "loss": 0.1127, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.836596301752791, | |
| "learning_rate": 7.919393580088317e-06, | |
| "loss": 0.1194, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 1.8460003077373996, | |
| "learning_rate": 7.910879925314413e-06, | |
| "loss": 0.1495, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.2392969367957027, | |
| "learning_rate": 7.902353484955277e-06, | |
| "loss": 0.1167, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 2.0193536716263605, | |
| "learning_rate": 7.893814296461964e-06, | |
| "loss": 0.1477, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.0299360866994416, | |
| "learning_rate": 7.885262397341524e-06, | |
| "loss": 0.1795, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.148228071373966, | |
| "learning_rate": 7.876697825156841e-06, | |
| "loss": 0.13, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.044815106464104, | |
| "learning_rate": 7.868120617526456e-06, | |
| "loss": 0.1135, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.8904373956408558, | |
| "learning_rate": 7.859530812124416e-06, | |
| "loss": 0.1556, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.8046682434008035, | |
| "learning_rate": 7.850928446680099e-06, | |
| "loss": 0.1452, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.1190190347465143, | |
| "learning_rate": 7.84231355897805e-06, | |
| "loss": 0.1561, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 5.047494027090662, | |
| "learning_rate": 7.833686186857815e-06, | |
| "loss": 0.1646, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.8698391434382085, | |
| "learning_rate": 7.825046368213782e-06, | |
| "loss": 0.1391, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.4357018771329595, | |
| "learning_rate": 7.816394140995004e-06, | |
| "loss": 0.1204, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.4093119255939646, | |
| "learning_rate": 7.807729543205035e-06, | |
| "loss": 0.0861, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.9753965478870885, | |
| "learning_rate": 7.799052612901767e-06, | |
| "loss": 0.1548, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.606627357271704, | |
| "learning_rate": 7.790363388197263e-06, | |
| "loss": 0.1268, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 2.0943568037356117, | |
| "learning_rate": 7.781661907257581e-06, | |
| "loss": 0.1215, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 1.4044794608294802, | |
| "learning_rate": 7.77294820830262e-06, | |
| "loss": 0.1042, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 32.00675322832298, | |
| "learning_rate": 7.764222329605939e-06, | |
| "loss": 0.1333, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.408529222244952, | |
| "learning_rate": 7.755484309494599e-06, | |
| "loss": 0.0864, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.8212939593374105, | |
| "learning_rate": 7.746734186348986e-06, | |
| "loss": 0.1593, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 4.137823332099506, | |
| "learning_rate": 7.737971998602648e-06, | |
| "loss": 0.1586, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.4537608630192507, | |
| "learning_rate": 7.729197784742125e-06, | |
| "loss": 0.1515, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.1365319942561576, | |
| "learning_rate": 7.720411583306784e-06, | |
| "loss": 0.1334, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.224916240795795, | |
| "learning_rate": 7.711613432888639e-06, | |
| "loss": 0.117, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.8956601461096838, | |
| "learning_rate": 7.702803372132192e-06, | |
| "loss": 0.1451, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.130808445432382, | |
| "learning_rate": 7.693981439734257e-06, | |
| "loss": 0.1382, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.9731631849430387, | |
| "learning_rate": 7.685147674443796e-06, | |
| "loss": 0.1499, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.4952826434696695, | |
| "learning_rate": 7.676302115061742e-06, | |
| "loss": 0.0755, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.4755572571402387, | |
| "learning_rate": 7.66744480044083e-06, | |
| "loss": 0.1635, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.384504040489573, | |
| "learning_rate": 7.658575769485432e-06, | |
| "loss": 0.1455, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.234483049009127, | |
| "learning_rate": 7.649695061151383e-06, | |
| "loss": 0.1507, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 3.3480972290543916, | |
| "learning_rate": 7.640802714445803e-06, | |
| "loss": 0.1164, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 2.449755875741938, | |
| "learning_rate": 7.631898768426938e-06, | |
| "loss": 0.1632, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 1.9895634278912286, | |
| "learning_rate": 7.6229832622039776e-06, | |
| "loss": 0.1249, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.38742227043979, | |
| "learning_rate": 7.61405623493689e-06, | |
| "loss": 0.1292, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.0347892998299764, | |
| "learning_rate": 7.605117725836251e-06, | |
| "loss": 0.119, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.390316072677058, | |
| "learning_rate": 7.596167774163061e-06, | |
| "loss": 0.1243, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.204263864515519, | |
| "learning_rate": 7.587206419228587e-06, | |
| "loss": 0.1239, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.3222180687668357, | |
| "learning_rate": 7.578233700394178e-06, | |
| "loss": 0.0961, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.8999977267746069, | |
| "learning_rate": 7.569249657071102e-06, | |
| "loss": 0.1176, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.7389595861953973, | |
| "learning_rate": 7.560254328720362e-06, | |
| "loss": 0.1073, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.2864263275239125, | |
| "learning_rate": 7.551247754852535e-06, | |
| "loss": 0.125, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 4.160533253201328, | |
| "learning_rate": 7.542229975027591e-06, | |
| "loss": 0.1067, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 13.328287081556125, | |
| "learning_rate": 7.533201028854713e-06, | |
| "loss": 0.1418, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 3.2510136986424896, | |
| "learning_rate": 7.524160955992142e-06, | |
| "loss": 0.1836, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.9837329996021282, | |
| "learning_rate": 7.515109796146982e-06, | |
| "loss": 0.1234, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 1.7815364070607005, | |
| "learning_rate": 7.506047589075041e-06, | |
| "loss": 0.1016, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.4046682954283596, | |
| "learning_rate": 7.496974374580645e-06, | |
| "loss": 0.1587, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 2.124077258722043, | |
| "learning_rate": 7.487890192516472e-06, | |
| "loss": 0.1121, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.0358602644377037, | |
| "learning_rate": 7.478795082783374e-06, | |
| "loss": 0.0946, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.988650043863186, | |
| "learning_rate": 7.469689085330196e-06, | |
| "loss": 0.1421, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.8578935497521591, | |
| "learning_rate": 7.4605722401536106e-06, | |
| "loss": 0.1546, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.735125797447667, | |
| "learning_rate": 7.451444587297937e-06, | |
| "loss": 0.0851, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.9757497485369377, | |
| "learning_rate": 7.4423061668549625e-06, | |
| "loss": 0.1119, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.684954557915866, | |
| "learning_rate": 7.433157018963771e-06, | |
| "loss": 0.1222, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.5222415804018157, | |
| "learning_rate": 7.423997183810565e-06, | |
| "loss": 0.0939, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.9844507330670464, | |
| "learning_rate": 7.41482670162849e-06, | |
| "loss": 0.1415, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.7027159832899936, | |
| "learning_rate": 7.405645612697456e-06, | |
| "loss": 0.1363, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.8367490333126868, | |
| "learning_rate": 7.396453957343961e-06, | |
| "loss": 0.1203, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.2815963303383255, | |
| "learning_rate": 7.3872517759409135e-06, | |
| "loss": 0.1599, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.994412011363053, | |
| "learning_rate": 7.378039108907461e-06, | |
| "loss": 0.1233, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.9423287451389104, | |
| "learning_rate": 7.368815996708801e-06, | |
| "loss": 0.1244, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.5723789324475477, | |
| "learning_rate": 7.3595824798560135e-06, | |
| "loss": 0.1762, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 2.512361293049007, | |
| "learning_rate": 7.350338598905878e-06, | |
| "loss": 0.1144, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 1.7297775495609162, | |
| "learning_rate": 7.341084394460698e-06, | |
| "loss": 0.1391, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.0229486702543236, | |
| "learning_rate": 7.331819907168121e-06, | |
| "loss": 0.1221, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.32719352344108, | |
| "learning_rate": 7.3225451777209585e-06, | |
| "loss": 0.1471, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.166222490887353, | |
| "learning_rate": 7.313260246857013e-06, | |
| "loss": 0.1238, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.114168239547472, | |
| "learning_rate": 7.303965155358888e-06, | |
| "loss": 0.1216, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.8124445478580515, | |
| "learning_rate": 7.294659944053822e-06, | |
| "loss": 0.15, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.2535216557090525, | |
| "learning_rate": 7.285344653813505e-06, | |
| "loss": 0.0811, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.047141351070446, | |
| "learning_rate": 7.276019325553891e-06, | |
| "loss": 0.1289, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 3.0581370858454067, | |
| "learning_rate": 7.266684000235028e-06, | |
| "loss": 0.1086, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.8854610758202903, | |
| "learning_rate": 7.2573387188608735e-06, | |
| "loss": 0.1266, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.829659439964262, | |
| "learning_rate": 7.247983522479114e-06, | |
| "loss": 0.1359, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 2.430253144187847, | |
| "learning_rate": 7.238618452180991e-06, | |
| "loss": 0.1242, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 5.3314444564097485, | |
| "learning_rate": 7.22924354910111e-06, | |
| "loss": 0.1509, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.5681898497611435, | |
| "learning_rate": 7.21985885441727e-06, | |
| "loss": 0.1167, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.5745929569582897, | |
| "learning_rate": 7.210464409350275e-06, | |
| "loss": 0.1169, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.9071600113183031, | |
| "learning_rate": 7.201060255163755e-06, | |
| "loss": 0.1176, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.5857425715812004, | |
| "learning_rate": 7.191646433163992e-06, | |
| "loss": 0.1221, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.882899987743563, | |
| "learning_rate": 7.1822229846997246e-06, | |
| "loss": 0.1043, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.738099622547025, | |
| "learning_rate": 7.172789951161979e-06, | |
| "loss": 0.1308, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 3.4937477548191893, | |
| "learning_rate": 7.1633473739838824e-06, | |
| "loss": 0.1169, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.8851301740589876, | |
| "learning_rate": 7.153895294640476e-06, | |
| "loss": 0.1128, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.2372720900797014, | |
| "learning_rate": 7.144433754648545e-06, | |
| "loss": 0.1492, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.348361521037077, | |
| "learning_rate": 7.134962795566425e-06, | |
| "loss": 0.0779, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.048315586664474, | |
| "learning_rate": 7.125482458993821e-06, | |
| "loss": 0.1393, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 3.7206749477746093, | |
| "learning_rate": 7.115992786571633e-06, | |
| "loss": 0.1983, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.6769013220582096, | |
| "learning_rate": 7.106493819981763e-06, | |
| "loss": 0.1311, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.8042305785903017, | |
| "learning_rate": 7.096985600946937e-06, | |
| "loss": 0.1074, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 5.252250341498427, | |
| "learning_rate": 7.0874681712305236e-06, | |
| "loss": 0.0803, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.934854094701551, | |
| "learning_rate": 7.0779415726363446e-06, | |
| "loss": 0.1035, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.888534086971409, | |
| "learning_rate": 7.0684058470084946e-06, | |
| "loss": 0.1445, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.1608538431314646, | |
| "learning_rate": 7.0588610362311595e-06, | |
| "loss": 0.1585, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 2.0745478481957553, | |
| "learning_rate": 7.049307182228428e-06, | |
| "loss": 0.1936, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 3.1127380539648977, | |
| "learning_rate": 7.0397443269641155e-06, | |
| "loss": 0.1696, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.4624486732374409, | |
| "learning_rate": 7.030172512441563e-06, | |
| "loss": 0.0657, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.026613587742609, | |
| "learning_rate": 7.020591780703474e-06, | |
| "loss": 0.1483, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.9299575112371328, | |
| "learning_rate": 7.011002173831714e-06, | |
| "loss": 0.1451, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 6.977416407574812, | |
| "learning_rate": 7.001403733947134e-06, | |
| "loss": 0.1248, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.208409953766827, | |
| "learning_rate": 6.991796503209382e-06, | |
| "loss": 0.165, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.78537720018225, | |
| "learning_rate": 6.982180523816715e-06, | |
| "loss": 0.1323, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.4678620733297283, | |
| "learning_rate": 6.972555838005823e-06, | |
| "loss": 0.136, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.6804545502803885, | |
| "learning_rate": 6.962922488051632e-06, | |
| "loss": 0.2074, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 3.327651450853166, | |
| "learning_rate": 6.953280516267129e-06, | |
| "loss": 0.1505, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.751593814388165, | |
| "learning_rate": 6.943629965003167e-06, | |
| "loss": 0.1305, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.897509617686097, | |
| "learning_rate": 6.933970876648284e-06, | |
| "loss": 0.1226, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.630112664652096, | |
| "learning_rate": 6.924303293628517e-06, | |
| "loss": 0.1342, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.7661430864522407, | |
| "learning_rate": 6.914627258407214e-06, | |
| "loss": 0.1188, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.7454628935933312, | |
| "learning_rate": 6.9049428134848475e-06, | |
| "loss": 0.1326, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.8984246938264262, | |
| "learning_rate": 6.895250001398828e-06, | |
| "loss": 0.0928, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 6.277357201489613, | |
| "learning_rate": 6.885548864723319e-06, | |
| "loss": 0.1234, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.758879562191011, | |
| "learning_rate": 6.875839446069048e-06, | |
| "loss": 0.1347, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.8599771677057306, | |
| "learning_rate": 6.866121788083118e-06, | |
| "loss": 0.1581, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.6561180713973451, | |
| "learning_rate": 6.856395933448823e-06, | |
| "loss": 0.1096, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 2.748117733072006, | |
| "learning_rate": 6.846661924885461e-06, | |
| "loss": 0.1261, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.8826540597064463, | |
| "learning_rate": 6.836919805148142e-06, | |
| "loss": 0.1331, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.977092053423411, | |
| "learning_rate": 6.827169617027607e-06, | |
| "loss": 0.1258, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 2.168255589615196, | |
| "learning_rate": 6.817411403350031e-06, | |
| "loss": 0.0836, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.9318484653801353, | |
| "learning_rate": 6.807645206976847e-06, | |
| "loss": 0.0789, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.7287551190043524, | |
| "learning_rate": 6.797871070804543e-06, | |
| "loss": 0.1202, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.4782462748125278, | |
| "learning_rate": 6.788089037764487e-06, | |
| "loss": 0.1157, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.7883602516215786, | |
| "learning_rate": 6.7782991508227295e-06, | |
| "loss": 0.0899, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 2.0292778027776923, | |
| "learning_rate": 6.768501452979817e-06, | |
| "loss": 0.1565, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 2.1898092797254387, | |
| "learning_rate": 6.758695987270609e-06, | |
| "loss": 0.1279, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.5131328560579043, | |
| "learning_rate": 6.748882796764083e-06, | |
| "loss": 0.1098, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.765825136606413, | |
| "learning_rate": 6.739061924563141e-06, | |
| "loss": 0.1024, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.013126733866634, | |
| "learning_rate": 6.729233413804434e-06, | |
| "loss": 0.1135, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.5407548773847035, | |
| "learning_rate": 6.719397307658154e-06, | |
| "loss": 0.1041, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.8806968993386401, | |
| "learning_rate": 6.709553649327865e-06, | |
| "loss": 0.1386, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.6939211307688753, | |
| "learning_rate": 6.699702482050298e-06, | |
| "loss": 0.1329, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.5237569971474656, | |
| "learning_rate": 6.689843849095164e-06, | |
| "loss": 0.1758, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.5189326500238918, | |
| "learning_rate": 6.679977793764969e-06, | |
| "loss": 0.1293, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.6096633555014934, | |
| "learning_rate": 6.6701043593948174e-06, | |
| "loss": 0.1214, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.3765375159205888, | |
| "learning_rate": 6.6602235893522294e-06, | |
| "loss": 0.1259, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.9383351279687073, | |
| "learning_rate": 6.650335527036943e-06, | |
| "loss": 0.1437, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.1740605725514452, | |
| "learning_rate": 6.640440215880726e-06, | |
| "loss": 0.1197, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.8553635326474456, | |
| "learning_rate": 6.630537699347187e-06, | |
| "loss": 0.116, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.6232991847863223, | |
| "learning_rate": 6.620628020931581e-06, | |
| "loss": 0.1031, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.5829400518191838, | |
| "learning_rate": 6.6107112241606255e-06, | |
| "loss": 0.0908, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 2.0288417622221298, | |
| "learning_rate": 6.600787352592297e-06, | |
| "loss": 0.0982, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.984936118733807, | |
| "learning_rate": 6.590856449815654e-06, | |
| "loss": 0.1517, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.805427898334222, | |
| "learning_rate": 6.580918559450632e-06, | |
| "loss": 0.1266, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.8927659434737785, | |
| "learning_rate": 6.5709737251478646e-06, | |
| "loss": 0.1593, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 8.785448329054995, | |
| "learning_rate": 6.561021990588479e-06, | |
| "loss": 0.1226, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.113868351962621, | |
| "learning_rate": 6.551063399483919e-06, | |
| "loss": 0.1294, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.950094914671727, | |
| "learning_rate": 6.541097995575737e-06, | |
| "loss": 0.1589, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.1112170290978525, | |
| "learning_rate": 6.531125822635413e-06, | |
| "loss": 0.1329, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.5475797916627902, | |
| "learning_rate": 6.521146924464157e-06, | |
| "loss": 0.1196, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.9767440453636411, | |
| "learning_rate": 6.511161344892721e-06, | |
| "loss": 0.1109, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.6265570229799957, | |
| "learning_rate": 6.501169127781205e-06, | |
| "loss": 0.1265, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.8140713966796398, | |
| "learning_rate": 6.491170317018859e-06, | |
| "loss": 0.0926, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.512997483717891, | |
| "learning_rate": 6.481164956523898e-06, | |
| "loss": 0.1062, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.5835209330529558, | |
| "learning_rate": 6.4711530902433024e-06, | |
| "loss": 0.1214, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.1810482657565533, | |
| "learning_rate": 6.461134762152634e-06, | |
| "loss": 0.1884, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.751236145968338, | |
| "learning_rate": 6.45111001625583e-06, | |
| "loss": 0.1357, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 2.1664736568653233, | |
| "learning_rate": 6.441078896585024e-06, | |
| "loss": 0.2049, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.2044176541204767, | |
| "learning_rate": 6.431041447200339e-06, | |
| "loss": 0.1157, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.8820357970464658, | |
| "learning_rate": 6.4209977121897025e-06, | |
| "loss": 0.1335, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.5346932864729, | |
| "learning_rate": 6.4109477356686545e-06, | |
| "loss": 0.0982, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.4430126211684466, | |
| "learning_rate": 6.400891561780144e-06, | |
| "loss": 0.0938, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.6600799015614198, | |
| "learning_rate": 6.390829234694345e-06, | |
| "loss": 0.1057, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.6253330956260457, | |
| "learning_rate": 6.380760798608452e-06, | |
| "loss": 0.0828, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.9213558492054916, | |
| "learning_rate": 6.370686297746504e-06, | |
| "loss": 0.0921, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.5362733191840696, | |
| "learning_rate": 6.36060577635917e-06, | |
| "loss": 0.1105, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.8321777012247429, | |
| "learning_rate": 6.350519278723563e-06, | |
| "loss": 0.1472, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.7131830343394663, | |
| "learning_rate": 6.340426849143048e-06, | |
| "loss": 0.1199, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.5816409557797964, | |
| "learning_rate": 6.330328531947045e-06, | |
| "loss": 0.147, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 2.123874168031327, | |
| "learning_rate": 6.3202243714908374e-06, | |
| "loss": 0.1399, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.816343510012244, | |
| "learning_rate": 6.310114412155369e-06, | |
| "loss": 0.1185, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.584769637698625, | |
| "learning_rate": 6.299998698347055e-06, | |
| "loss": 0.0931, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.9058687524566538, | |
| "learning_rate": 6.28987727449759e-06, | |
| "loss": 0.1248, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.548949465664935, | |
| "learning_rate": 6.2797501850637465e-06, | |
| "loss": 0.1294, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.1149962944001177, | |
| "learning_rate": 6.2696174745271795e-06, | |
| "loss": 0.1329, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.9168275656736968, | |
| "learning_rate": 6.25947918739424e-06, | |
| "loss": 0.0828, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.57954895290732, | |
| "learning_rate": 6.249335368195771e-06, | |
| "loss": 0.1295, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.2957808654358192, | |
| "learning_rate": 6.239186061486911e-06, | |
| "loss": 0.1083, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.1345641796436556, | |
| "learning_rate": 6.229031311846902e-06, | |
| "loss": 0.1097, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.7952238152505315, | |
| "learning_rate": 6.218871163878899e-06, | |
| "loss": 0.0948, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.8358217756709838, | |
| "learning_rate": 6.208705662209763e-06, | |
| "loss": 0.1335, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.0599905715702804, | |
| "learning_rate": 6.198534851489872e-06, | |
| "loss": 0.1316, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.7338996737841588, | |
| "learning_rate": 6.188358776392921e-06, | |
| "loss": 0.129, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.1122584913953952, | |
| "learning_rate": 6.178177481615731e-06, | |
| "loss": 0.1262, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.9439971478444478, | |
| "learning_rate": 6.1679910118780485e-06, | |
| "loss": 0.1256, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.84689942662513, | |
| "learning_rate": 6.1577994119223505e-06, | |
| "loss": 0.1147, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.5530955100809665, | |
| "learning_rate": 6.147602726513648e-06, | |
| "loss": 0.1103, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 2.2899708375245047, | |
| "learning_rate": 6.137401000439286e-06, | |
| "loss": 0.0754, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.9394761284686224, | |
| "learning_rate": 6.127194278508753e-06, | |
| "loss": 0.1672, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.5800890047158567, | |
| "learning_rate": 6.116982605553482e-06, | |
| "loss": 0.1557, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.3971769786049262, | |
| "learning_rate": 6.1067660264266496e-06, | |
| "loss": 0.1043, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.5778275295516868, | |
| "learning_rate": 6.096544586002983e-06, | |
| "loss": 0.0854, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.8220202039345554, | |
| "learning_rate": 6.0863183291785626e-06, | |
| "loss": 0.1207, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 3.3027913066854846, | |
| "learning_rate": 6.076087300870622e-06, | |
| "loss": 0.1567, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.696851243751135, | |
| "learning_rate": 6.065851546017357e-06, | |
| "loss": 0.1497, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.8274017982330293, | |
| "learning_rate": 6.055611109577722e-06, | |
| "loss": 0.1295, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.1597068887132207, | |
| "learning_rate": 6.045366036531229e-06, | |
| "loss": 0.1201, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.104931146165314, | |
| "learning_rate": 6.035116371877766e-06, | |
| "loss": 0.1434, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.6266980971348304, | |
| "learning_rate": 6.024862160637379e-06, | |
| "loss": 0.1101, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 2.4189796649639415, | |
| "learning_rate": 6.014603447850091e-06, | |
| "loss": 0.1249, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.4298746279674857, | |
| "learning_rate": 6.004340278575695e-06, | |
| "loss": 0.0841, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.9802183561373556, | |
| "learning_rate": 5.994072697893559e-06, | |
| "loss": 0.1289, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.905303404537033, | |
| "learning_rate": 5.983800750902425e-06, | |
| "loss": 0.1862, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.3821102495196778, | |
| "learning_rate": 5.973524482720216e-06, | |
| "loss": 0.0751, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.87233322122542, | |
| "learning_rate": 5.963243938483834e-06, | |
| "loss": 0.1401, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.338191434797121, | |
| "learning_rate": 5.952959163348965e-06, | |
| "loss": 0.1393, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.9099485834004324, | |
| "learning_rate": 5.942670202489874e-06, | |
| "loss": 0.1457, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.0645583006230246, | |
| "learning_rate": 5.932377101099215e-06, | |
| "loss": 0.1069, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.5884995110913243, | |
| "learning_rate": 5.922079904387826e-06, | |
| "loss": 0.1397, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.1415758056462293, | |
| "learning_rate": 5.911778657584538e-06, | |
| "loss": 0.1029, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.5354706525429889, | |
| "learning_rate": 5.9014734059359665e-06, | |
| "loss": 0.0856, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.9302822282714498, | |
| "learning_rate": 5.891164194706318e-06, | |
| "loss": 0.0894, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.129932164410851, | |
| "learning_rate": 5.880851069177193e-06, | |
| "loss": 0.1306, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.53369839366938, | |
| "learning_rate": 5.870534074647382e-06, | |
| "loss": 0.1148, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.5416405545595093, | |
| "learning_rate": 5.860213256432674e-06, | |
| "loss": 0.1018, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.3749980185297184, | |
| "learning_rate": 5.849888659865649e-06, | |
| "loss": 0.0845, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.6540761654729228, | |
| "learning_rate": 5.839560330295485e-06, | |
| "loss": 0.0952, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 2.662159393449827, | |
| "learning_rate": 5.829228313087756e-06, | |
| "loss": 0.0994, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.9503366133926616, | |
| "learning_rate": 5.818892653624229e-06, | |
| "loss": 0.1315, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.702587301532874, | |
| "learning_rate": 5.80855339730268e-06, | |
| "loss": 0.1484, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.8348978334560153, | |
| "learning_rate": 5.7982105895366725e-06, | |
| "loss": 0.1406, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.022613796849133, | |
| "learning_rate": 5.787864275755375e-06, | |
| "loss": 0.1319, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.5023748634115024, | |
| "learning_rate": 5.777514501403353e-06, | |
| "loss": 0.0929, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.7013224101759017, | |
| "learning_rate": 5.767161311940372e-06, | |
| "loss": 0.0932, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.8315351550901093, | |
| "learning_rate": 5.756804752841203e-06, | |
| "loss": 0.1173, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 4.0170416119005425, | |
| "learning_rate": 5.746444869595411e-06, | |
| "loss": 0.1334, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.8673140531034689, | |
| "learning_rate": 5.736081707707163e-06, | |
| "loss": 0.1484, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.210533914692524, | |
| "learning_rate": 5.725715312695032e-06, | |
| "loss": 0.1016, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.989411601512688, | |
| "learning_rate": 5.715345730091786e-06, | |
| "loss": 0.1052, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.273203843016005, | |
| "learning_rate": 5.704973005444194e-06, | |
| "loss": 0.1804, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 4.674460990852892, | |
| "learning_rate": 5.6945971843128334e-06, | |
| "loss": 0.09, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.8538049522688833, | |
| "learning_rate": 5.684218312271874e-06, | |
| "loss": 0.114, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 2.180520132721186, | |
| "learning_rate": 5.6738364349088905e-06, | |
| "loss": 0.1309, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.5591693338815162, | |
| "learning_rate": 5.663451597824655e-06, | |
| "loss": 0.1403, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.7568489771713605, | |
| "learning_rate": 5.653063846632945e-06, | |
| "loss": 0.0832, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.7500043042100533, | |
| "learning_rate": 5.642673226960332e-06, | |
| "loss": 0.1241, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.8855691624550837, | |
| "learning_rate": 5.632279784445991e-06, | |
| "loss": 0.124, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.626920358067962, | |
| "learning_rate": 5.621883564741494e-06, | |
| "loss": 0.102, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.8032329660583741, | |
| "learning_rate": 5.611484613510608e-06, | |
| "loss": 0.1484, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.9140643898960137, | |
| "learning_rate": 5.6010829764291074e-06, | |
| "loss": 0.1215, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.1624692381610573, | |
| "learning_rate": 5.590678699184553e-06, | |
| "loss": 0.1296, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.023676143260662, | |
| "learning_rate": 5.580271827476111e-06, | |
| "loss": 0.1355, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.5620252064385884, | |
| "learning_rate": 5.569862407014334e-06, | |
| "loss": 0.1178, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.1511386883957877, | |
| "learning_rate": 5.5594504835209786e-06, | |
| "loss": 0.0788, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.5616956818437346, | |
| "learning_rate": 5.549036102728791e-06, | |
| "loss": 0.146, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.8693991078131527, | |
| "learning_rate": 5.538619310381313e-06, | |
| "loss": 0.099, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.052175802220878, | |
| "learning_rate": 5.528200152232674e-06, | |
| "loss": 0.137, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 3.7482892753111607, | |
| "learning_rate": 5.5177786740474e-06, | |
| "loss": 0.124, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.6066071271539237, | |
| "learning_rate": 5.507354921600205e-06, | |
| "loss": 0.1207, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.1673157599924906, | |
| "learning_rate": 5.496928940675795e-06, | |
| "loss": 0.0943, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 4.142894335501094, | |
| "learning_rate": 5.486500777068659e-06, | |
| "loss": 0.1771, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.0460770317618135, | |
| "learning_rate": 5.47607047658288e-06, | |
| "loss": 0.1314, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.1058657181662372, | |
| "learning_rate": 5.465638085031918e-06, | |
| "loss": 0.1403, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.6840379754298231, | |
| "learning_rate": 5.4552036482384275e-06, | |
| "loss": 0.1506, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.9289322283317214, | |
| "learning_rate": 5.444767212034039e-06, | |
| "loss": 0.1023, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.625262969875529, | |
| "learning_rate": 5.434328822259167e-06, | |
| "loss": 0.113, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.4113122069453283, | |
| "learning_rate": 5.423888524762812e-06, | |
| "loss": 0.1012, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.1703867341982583, | |
| "learning_rate": 5.413446365402344e-06, | |
| "loss": 0.1802, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.4290049155356024, | |
| "learning_rate": 5.403002390043319e-06, | |
| "loss": 0.095, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.3435851884276515, | |
| "learning_rate": 5.3925566445592684e-06, | |
| "loss": 0.0858, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 3.711060268727994, | |
| "learning_rate": 5.382109174831493e-06, | |
| "loss": 0.1415, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.7557646547416874, | |
| "learning_rate": 5.3716600267488764e-06, | |
| "loss": 0.1082, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.0596730304085207, | |
| "learning_rate": 5.361209246207662e-06, | |
| "loss": 0.1089, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 2.935238394520285, | |
| "learning_rate": 5.350756879111278e-06, | |
| "loss": 0.1137, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.5074975424220682, | |
| "learning_rate": 5.340302971370108e-06, | |
| "loss": 0.1334, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.753317095876859, | |
| "learning_rate": 5.329847568901311e-06, | |
| "loss": 0.1018, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 2.6666992189358707, | |
| "learning_rate": 5.319390717628607e-06, | |
| "loss": 0.1067, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 2.1441072331379893, | |
| "learning_rate": 5.308932463482082e-06, | |
| "loss": 0.0976, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 2.9819008451024653, | |
| "learning_rate": 5.298472852397983e-06, | |
| "loss": 0.125, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 2.078504932257869, | |
| "learning_rate": 5.288011930318518e-06, | |
| "loss": 0.1313, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.7500202651291776, | |
| "learning_rate": 5.277549743191653e-06, | |
| "loss": 0.1256, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.957028569964894, | |
| "learning_rate": 5.267086336970908e-06, | |
| "loss": 0.119, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.64080169082907, | |
| "learning_rate": 5.256621757615161e-06, | |
| "loss": 0.1017, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.912980083382728, | |
| "learning_rate": 5.2461560510884436e-06, | |
| "loss": 0.132, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.5189601667278445, | |
| "learning_rate": 5.235689263359734e-06, | |
| "loss": 0.1062, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.553830301206541, | |
| "learning_rate": 5.2252214404027636e-06, | |
| "loss": 0.1292, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.8629058125857634, | |
| "learning_rate": 5.214752628195807e-06, | |
| "loss": 0.1293, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.593636492020973, | |
| "learning_rate": 5.2042828727214866e-06, | |
| "loss": 0.0956, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.3310531956555536, | |
| "learning_rate": 5.1938122199665685e-06, | |
| "loss": 0.1161, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.8528554576118663, | |
| "learning_rate": 5.183340715921759e-06, | |
| "loss": 0.1382, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.492914105028018, | |
| "learning_rate": 5.172868406581502e-06, | |
| "loss": 0.0887, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.6396055924297854, | |
| "learning_rate": 5.16239533794378e-06, | |
| "loss": 0.1128, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.974365331118826, | |
| "learning_rate": 5.1519215560099116e-06, | |
| "loss": 0.1094, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.0195657375792595, | |
| "learning_rate": 5.141447106784347e-06, | |
| "loss": 0.1017, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.9226138716911316, | |
| "learning_rate": 5.130972036274466e-06, | |
| "loss": 0.1107, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.8625360778194306, | |
| "learning_rate": 5.120496390490382e-06, | |
| "loss": 0.1265, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.5463815816542692, | |
| "learning_rate": 5.110020215444731e-06, | |
| "loss": 0.0986, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.9289825743188587, | |
| "learning_rate": 5.099543557152474e-06, | |
| "loss": 0.1583, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.9666897967664563, | |
| "learning_rate": 5.089066461630698e-06, | |
| "loss": 0.1363, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.9958797459896542, | |
| "learning_rate": 5.0785889748984055e-06, | |
| "loss": 0.1282, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.9239342659219918, | |
| "learning_rate": 5.06811114297632e-06, | |
| "loss": 0.1207, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.8061249514244604, | |
| "learning_rate": 5.057633011886683e-06, | |
| "loss": 0.1127, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.8662644427333124, | |
| "learning_rate": 5.047154627653045e-06, | |
| "loss": 0.0944, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 2.090550754244602, | |
| "learning_rate": 5.036676036300074e-06, | |
| "loss": 0.1055, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.7189547955473783, | |
| "learning_rate": 5.026197283853345e-06, | |
| "loss": 0.1271, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.6763816609777438, | |
| "learning_rate": 5.015718416339139e-06, | |
| "loss": 0.1119, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 3.0710333881750462, | |
| "learning_rate": 5.005239479784246e-06, | |
| "loss": 0.1109, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.2908947072097592, | |
| "learning_rate": 4.994760520215755e-06, | |
| "loss": 0.0838, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 4.726408361017791, | |
| "learning_rate": 4.984281583660864e-06, | |
| "loss": 0.1009, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.6042202190498385, | |
| "learning_rate": 4.973802716146658e-06, | |
| "loss": 0.1591, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.6118800225800851, | |
| "learning_rate": 4.963323963699926e-06, | |
| "loss": 0.1129, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.7270638214518879, | |
| "learning_rate": 4.9528453723469575e-06, | |
| "loss": 0.1113, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.7432315697154184, | |
| "learning_rate": 4.942366988113319e-06, | |
| "loss": 0.0861, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.0906970873027073, | |
| "learning_rate": 4.931888857023682e-06, | |
| "loss": 0.1382, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 3.3501367666525494, | |
| "learning_rate": 4.921411025101597e-06, | |
| "loss": 0.1169, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.4566720262640316, | |
| "learning_rate": 4.910933538369303e-06, | |
| "loss": 0.106, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.5269663618460712, | |
| "learning_rate": 4.900456442847528e-06, | |
| "loss": 0.0916, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.7453102664715303, | |
| "learning_rate": 4.88997978455527e-06, | |
| "loss": 0.1402, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.018716604307633, | |
| "learning_rate": 4.879503609509619e-06, | |
| "loss": 0.1095, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.1106306694591015, | |
| "learning_rate": 4.869027963725536e-06, | |
| "loss": 0.1505, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.1395796327180947, | |
| "learning_rate": 4.858552893215655e-06, | |
| "loss": 0.1141, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 2.4202165199124486, | |
| "learning_rate": 4.84807844399009e-06, | |
| "loss": 0.1705, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.835373663677108, | |
| "learning_rate": 4.837604662056222e-06, | |
| "loss": 0.1313, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.8005738743632453, | |
| "learning_rate": 4.827131593418501e-06, | |
| "loss": 0.1449, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 2.124505896703758, | |
| "learning_rate": 4.816659284078243e-06, | |
| "loss": 0.1478, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 2.127837289735595, | |
| "learning_rate": 4.8061877800334315e-06, | |
| "loss": 0.1886, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.1604683050081326, | |
| "learning_rate": 4.795717127278515e-06, | |
| "loss": 0.0786, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.9637692788927852, | |
| "learning_rate": 4.785247371804195e-06, | |
| "loss": 0.1063, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.7173564883561871, | |
| "learning_rate": 4.77477855959724e-06, | |
| "loss": 0.1149, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 2.5729152882720685, | |
| "learning_rate": 4.764310736640267e-06, | |
| "loss": 0.1293, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.6209397414677442, | |
| "learning_rate": 4.753843948911556e-06, | |
| "loss": 0.1285, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.8314069234913113, | |
| "learning_rate": 4.74337824238484e-06, | |
| "loss": 0.1209, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.570200559050954, | |
| "learning_rate": 4.732913663029093e-06, | |
| "loss": 0.1127, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.7368184688198605, | |
| "learning_rate": 4.72245025680835e-06, | |
| "loss": 0.1441, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.5220971625438804, | |
| "learning_rate": 4.7119880696814835e-06, | |
| "loss": 0.0893, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.8443284641960835, | |
| "learning_rate": 4.701527147602018e-06, | |
| "loss": 0.1247, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 2.2951417910999217, | |
| "learning_rate": 4.691067536517921e-06, | |
| "loss": 0.1263, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.8763288375345808, | |
| "learning_rate": 4.6806092823713944e-06, | |
| "loss": 0.1207, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.6884260275044232, | |
| "learning_rate": 4.670152431098692e-06, | |
| "loss": 0.1242, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.5129931167494661, | |
| "learning_rate": 4.659697028629894e-06, | |
| "loss": 0.1067, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.5727784925762973, | |
| "learning_rate": 4.649243120888723e-06, | |
| "loss": 0.0828, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.1329407103752187, | |
| "learning_rate": 4.6387907537923385e-06, | |
| "loss": 0.0991, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.7788421760508666, | |
| "learning_rate": 4.628339973251126e-06, | |
| "loss": 0.1151, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.413683936731647, | |
| "learning_rate": 4.617890825168507e-06, | |
| "loss": 0.102, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.2207040797969175, | |
| "learning_rate": 4.607443355440734e-06, | |
| "loss": 0.1806, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.3957444384519695, | |
| "learning_rate": 4.596997609956682e-06, | |
| "loss": 0.1311, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.8035465675076767, | |
| "learning_rate": 4.5865536345976576e-06, | |
| "loss": 0.1434, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.819391164170477, | |
| "learning_rate": 4.576111475237191e-06, | |
| "loss": 0.1559, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.7452355098149885, | |
| "learning_rate": 4.565671177740834e-06, | |
| "loss": 0.0959, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.8760725666495923, | |
| "learning_rate": 4.555232787965963e-06, | |
| "loss": 0.107, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.2518737780619866, | |
| "learning_rate": 4.544796351761574e-06, | |
| "loss": 0.0914, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.2185919430350984, | |
| "learning_rate": 4.534361914968083e-06, | |
| "loss": 0.1812, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 2.146679707787453, | |
| "learning_rate": 4.523929523417122e-06, | |
| "loss": 0.1183, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.7114313656953086, | |
| "learning_rate": 4.513499222931342e-06, | |
| "loss": 0.1194, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.8394742753028903, | |
| "learning_rate": 4.503071059324206e-06, | |
| "loss": 0.1025, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.8273169411773014, | |
| "learning_rate": 4.492645078399795e-06, | |
| "loss": 0.0947, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.7740798125372605, | |
| "learning_rate": 4.482221325952602e-06, | |
| "loss": 0.1431, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 2.1192422103295256, | |
| "learning_rate": 4.471799847767328e-06, | |
| "loss": 0.122, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 2.0660160064685416, | |
| "learning_rate": 4.4613806896186906e-06, | |
| "loss": 0.1058, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.5742033350811535, | |
| "learning_rate": 4.450963897271211e-06, | |
| "loss": 0.0916, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.8119951469136164, | |
| "learning_rate": 4.440549516479022e-06, | |
| "loss": 0.1163, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 2.1159566523211066, | |
| "learning_rate": 4.430137592985669e-06, | |
| "loss": 0.1018, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 2.126873605321964, | |
| "learning_rate": 4.419728172523892e-06, | |
| "loss": 0.1498, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.9676351837368764, | |
| "learning_rate": 4.409321300815449e-06, | |
| "loss": 0.1758, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.709782914468727, | |
| "learning_rate": 4.398917023570894e-06, | |
| "loss": 0.1008, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.9285211732869538, | |
| "learning_rate": 4.388515386489391e-06, | |
| "loss": 0.1254, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.7161777346446483, | |
| "learning_rate": 4.378116435258509e-06, | |
| "loss": 0.084, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.806844395754332, | |
| "learning_rate": 4.36772021555401e-06, | |
| "loss": 0.0986, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 2.0837972155833735, | |
| "learning_rate": 4.357326773039669e-06, | |
| "loss": 0.0973, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 2.3185486600701966, | |
| "learning_rate": 4.346936153367056e-06, | |
| "loss": 0.1006, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.5256694435635292, | |
| "learning_rate": 4.336548402175345e-06, | |
| "loss": 0.0739, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.832223196645926, | |
| "learning_rate": 4.326163565091112e-06, | |
| "loss": 0.0987, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 2.002319765666875, | |
| "learning_rate": 4.315781687728127e-06, | |
| "loss": 0.0726, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.7723246961310433, | |
| "learning_rate": 4.305402815687168e-06, | |
| "loss": 0.1082, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.7830256916450828, | |
| "learning_rate": 4.295026994555807e-06, | |
| "loss": 0.0961, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.8291995749358043, | |
| "learning_rate": 4.284654269908216e-06, | |
| "loss": 0.1442, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.7253042913750083, | |
| "learning_rate": 4.27428468730497e-06, | |
| "loss": 0.1163, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 3.171182731375501, | |
| "learning_rate": 4.263918292292838e-06, | |
| "loss": 0.1588, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.993131212790838, | |
| "learning_rate": 4.253555130404592e-06, | |
| "loss": 0.1184, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.9909346999767512, | |
| "learning_rate": 4.243195247158798e-06, | |
| "loss": 0.1361, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.985061431936418, | |
| "learning_rate": 4.232838688059628e-06, | |
| "loss": 0.0964, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.751213341901581, | |
| "learning_rate": 4.2224854985966495e-06, | |
| "loss": 0.108, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.9591178958676378, | |
| "learning_rate": 4.212135724244627e-06, | |
| "loss": 0.1276, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.7579749826323114, | |
| "learning_rate": 4.20178941046333e-06, | |
| "loss": 0.1062, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.6097228735170386, | |
| "learning_rate": 4.191446602697321e-06, | |
| "loss": 0.1016, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.6014944661125698, | |
| "learning_rate": 4.181107346375771e-06, | |
| "loss": 0.1377, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.7711293693423962, | |
| "learning_rate": 4.170771686912247e-06, | |
| "loss": 0.1007, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.7880026810514285, | |
| "learning_rate": 4.160439669704516e-06, | |
| "loss": 0.1143, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.6987412567311313, | |
| "learning_rate": 4.150111340134353e-06, | |
| "loss": 0.1237, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.9768502462590711, | |
| "learning_rate": 4.139786743567328e-06, | |
| "loss": 0.1233, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 2.3478846945506415, | |
| "learning_rate": 4.129465925352619e-06, | |
| "loss": 0.1044, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.9981013791009108, | |
| "learning_rate": 4.11914893082281e-06, | |
| "loss": 0.1339, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.8517464074009944, | |
| "learning_rate": 4.108835805293684e-06, | |
| "loss": 0.1429, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.5157512033678697, | |
| "learning_rate": 4.098526594064036e-06, | |
| "loss": 0.1062, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.840817457718613, | |
| "learning_rate": 4.0882213424154635e-06, | |
| "loss": 0.1145, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 2.029610584941451, | |
| "learning_rate": 4.077920095612174e-06, | |
| "loss": 0.1341, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.6338844179144825, | |
| "learning_rate": 4.067622898900788e-06, | |
| "loss": 0.108, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 3.9033531511200743, | |
| "learning_rate": 4.057329797510128e-06, | |
| "loss": 0.1004, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.4571435474781058, | |
| "learning_rate": 4.047040836651037e-06, | |
| "loss": 0.1524, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 2.5060740442891944, | |
| "learning_rate": 4.036756061516166e-06, | |
| "loss": 0.1106, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 2.0788331275748475, | |
| "learning_rate": 4.026475517279785e-06, | |
| "loss": 0.1133, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.5851053578741607, | |
| "learning_rate": 4.0161992490975756e-06, | |
| "loss": 0.1226, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.7795177204393597, | |
| "learning_rate": 4.005927302106442e-06, | |
| "loss": 0.1275, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.7209999961939761, | |
| "learning_rate": 3.995659721424305e-06, | |
| "loss": 0.1046, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.5459099455260035, | |
| "learning_rate": 3.98539655214991e-06, | |
| "loss": 0.0984, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.7475161853285592, | |
| "learning_rate": 3.9751378393626224e-06, | |
| "loss": 0.1262, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.5464582238846574, | |
| "learning_rate": 3.964883628122237e-06, | |
| "loss": 0.0849, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 2.389196944686995, | |
| "learning_rate": 3.954633963468772e-06, | |
| "loss": 0.114, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.7043708369854134, | |
| "learning_rate": 3.944388890422281e-06, | |
| "loss": 0.1255, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.4523407706533134, | |
| "learning_rate": 3.934148453982643e-06, | |
| "loss": 0.1066, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 2.004405717963473, | |
| "learning_rate": 3.923912699129378e-06, | |
| "loss": 0.1068, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.6414686497301643, | |
| "learning_rate": 3.91368167082144e-06, | |
| "loss": 0.1037, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 3.04799587934896, | |
| "learning_rate": 3.903455413997018e-06, | |
| "loss": 0.154, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.946993384701469, | |
| "learning_rate": 3.893233973573351e-06, | |
| "loss": 0.1132, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.7235353612078088, | |
| "learning_rate": 3.88301739444652e-06, | |
| "loss": 0.1088, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.6453579291204798, | |
| "learning_rate": 3.872805721491247e-06, | |
| "loss": 0.1041, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.7804807126926472, | |
| "learning_rate": 3.862598999560717e-06, | |
| "loss": 0.1117, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.5177028945223077, | |
| "learning_rate": 3.852397273486354e-06, | |
| "loss": 0.1091, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 4.018703368543647, | |
| "learning_rate": 3.8422005880776495e-06, | |
| "loss": 0.1085, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.218370435268377, | |
| "learning_rate": 3.832008988121953e-06, | |
| "loss": 0.1337, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.0545128228188863, | |
| "learning_rate": 3.82182251838427e-06, | |
| "loss": 0.156, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.7606510408251705, | |
| "learning_rate": 3.8116412236070823e-06, | |
| "loss": 0.1104, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.8099072720687117, | |
| "learning_rate": 3.8014651485101304e-06, | |
| "loss": 0.1153, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.317637310297308, | |
| "learning_rate": 3.7912943377902374e-06, | |
| "loss": 0.1386, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.261715069348875, | |
| "learning_rate": 3.7811288361211025e-06, | |
| "loss": 0.1177, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.1146627770061937, | |
| "learning_rate": 3.770968688153098e-06, | |
| "loss": 0.1207, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.3219966429312695, | |
| "learning_rate": 3.760813938513092e-06, | |
| "loss": 0.1432, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.5551696574986462, | |
| "learning_rate": 3.7506646318042307e-06, | |
| "loss": 0.1108, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.6423151381409171, | |
| "learning_rate": 3.740520812605759e-06, | |
| "loss": 0.0809, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 2.1270054148515642, | |
| "learning_rate": 3.730382525472822e-06, | |
| "loss": 0.1488, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.4525654065999545, | |
| "learning_rate": 3.720249814936255e-06, | |
| "loss": 0.1703, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.6466509751453386, | |
| "learning_rate": 3.7101227255024113e-06, | |
| "loss": 0.0976, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.453861060575215, | |
| "learning_rate": 3.700001301652947e-06, | |
| "loss": 0.1087, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.5946568411941369, | |
| "learning_rate": 3.689885587844633e-06, | |
| "loss": 0.098, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.0732225644841873, | |
| "learning_rate": 3.6797756285091634e-06, | |
| "loss": 0.1449, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.657934801330924, | |
| "learning_rate": 3.6696714680529544e-06, | |
| "loss": 0.134, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.9263667651287582, | |
| "learning_rate": 3.659573150856954e-06, | |
| "loss": 0.1372, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.6330537523458593, | |
| "learning_rate": 3.6494807212764396e-06, | |
| "loss": 0.1184, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.8951926315625367, | |
| "learning_rate": 3.639394223640831e-06, | |
| "loss": 0.1222, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.6317636026091127, | |
| "learning_rate": 3.6293137022534967e-06, | |
| "loss": 0.1007, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.108600411804529, | |
| "learning_rate": 3.619239201391548e-06, | |
| "loss": 0.1061, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.8920524247392, | |
| "learning_rate": 3.6091707653056584e-06, | |
| "loss": 0.1553, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.8232758558650688, | |
| "learning_rate": 3.599108438219857e-06, | |
| "loss": 0.1274, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.804319167028759, | |
| "learning_rate": 3.5890522643313454e-06, | |
| "loss": 0.112, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.3909948963927101, | |
| "learning_rate": 3.5790022878102983e-06, | |
| "loss": 0.0969, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 2.44326112841619, | |
| "learning_rate": 3.568958552799662e-06, | |
| "loss": 0.1218, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.572623722062717, | |
| "learning_rate": 3.5589211034149783e-06, | |
| "loss": 0.1032, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.163157034822567, | |
| "learning_rate": 3.5488899837441703e-06, | |
| "loss": 0.095, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.400615824919047, | |
| "learning_rate": 3.5388652378473664e-06, | |
| "loss": 0.0949, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.5133479004266996, | |
| "learning_rate": 3.5288469097566992e-06, | |
| "loss": 0.1234, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.6235308351488011, | |
| "learning_rate": 3.518835043476103e-06, | |
| "loss": 0.0951, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.9162477641836664, | |
| "learning_rate": 3.508829682981143e-06, | |
| "loss": 0.1454, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.109715126524471, | |
| "learning_rate": 3.4988308722187963e-06, | |
| "loss": 0.1523, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.4559210937158173, | |
| "learning_rate": 3.488838655107279e-06, | |
| "loss": 0.0731, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 2.506470505523408, | |
| "learning_rate": 3.4788530755358453e-06, | |
| "loss": 0.0898, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.613660972390155, | |
| "learning_rate": 3.468874177364589e-06, | |
| "loss": 0.0983, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.5717909485298631, | |
| "learning_rate": 3.458902004424265e-06, | |
| "loss": 0.0973, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.4205799220113384, | |
| "learning_rate": 3.4489366005160823e-06, | |
| "loss": 0.0959, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.3451838006850796, | |
| "learning_rate": 3.4389780094115206e-06, | |
| "loss": 0.1076, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.6388159985908506, | |
| "learning_rate": 3.429026274852137e-06, | |
| "loss": 0.0992, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.5682048979771246, | |
| "learning_rate": 3.4190814405493687e-06, | |
| "loss": 0.1062, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 2.2502678976586212, | |
| "learning_rate": 3.4091435501843486e-06, | |
| "loss": 0.1168, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.3953877887707054, | |
| "learning_rate": 3.3992126474077035e-06, | |
| "loss": 0.0983, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.6007212383303575, | |
| "learning_rate": 3.3892887758393766e-06, | |
| "loss": 0.1305, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.9440119392902513, | |
| "learning_rate": 3.37937197906842e-06, | |
| "loss": 0.0931, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.6043579363096137, | |
| "learning_rate": 3.3694623006528148e-06, | |
| "loss": 0.0998, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.7136607779901858, | |
| "learning_rate": 3.359559784119277e-06, | |
| "loss": 0.1016, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.5384259717082835, | |
| "learning_rate": 3.349664472963059e-06, | |
| "loss": 0.0989, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.9497723408976853, | |
| "learning_rate": 3.339776410647771e-06, | |
| "loss": 0.1174, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.6067390043096454, | |
| "learning_rate": 3.3298956406051842e-06, | |
| "loss": 0.104, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 2.2852610652044447, | |
| "learning_rate": 3.320022206235033e-06, | |
| "loss": 0.1062, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.5953068410830473, | |
| "learning_rate": 3.3101561509048383e-06, | |
| "loss": 0.0875, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.6221547492274104, | |
| "learning_rate": 3.3002975179497033e-06, | |
| "loss": 0.1187, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.6549898043169382, | |
| "learning_rate": 3.290446350672134e-06, | |
| "loss": 0.1013, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 2.1405214271646877, | |
| "learning_rate": 3.2806026923418473e-06, | |
| "loss": 0.1245, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.812234610812394, | |
| "learning_rate": 3.270766586195568e-06, | |
| "loss": 0.0752, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.771591136410723, | |
| "learning_rate": 3.2609380754368603e-06, | |
| "loss": 0.1264, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.980251276861431, | |
| "learning_rate": 3.2511172032359185e-06, | |
| "loss": 0.1027, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.9113584579125493, | |
| "learning_rate": 3.2413040127293906e-06, | |
| "loss": 0.1414, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.812466374310757, | |
| "learning_rate": 3.231498547020184e-06, | |
| "loss": 0.1192, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.6565183224762494, | |
| "learning_rate": 3.2217008491772726e-06, | |
| "loss": 0.0933, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 2.8256853684029712, | |
| "learning_rate": 3.2119109622355157e-06, | |
| "loss": 0.1312, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 2.3886097887887576, | |
| "learning_rate": 3.202128929195457e-06, | |
| "loss": 0.1218, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.8207805719268897, | |
| "learning_rate": 3.1923547930231523e-06, | |
| "loss": 0.0944, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 13.002596537052982, | |
| "learning_rate": 3.1825885966499694e-06, | |
| "loss": 0.1071, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 3.180981776214822, | |
| "learning_rate": 3.172830382972394e-06, | |
| "loss": 0.1026, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 2.132426193384201, | |
| "learning_rate": 3.163080194851859e-06, | |
| "loss": 0.0816, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 2.6527864232962632, | |
| "learning_rate": 3.1533380751145414e-06, | |
| "loss": 0.1572, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 2.829324594252865, | |
| "learning_rate": 3.1436040665511787e-06, | |
| "loss": 0.1064, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.6747234050546536, | |
| "learning_rate": 3.1338782119168844e-06, | |
| "loss": 0.0885, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.5358562835425493, | |
| "learning_rate": 3.124160553930953e-06, | |
| "loss": 0.1161, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.2961336183013072, | |
| "learning_rate": 3.1144511352766828e-06, | |
| "loss": 0.0823, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.968300020231063, | |
| "learning_rate": 3.104749998601173e-06, | |
| "loss": 0.1084, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 2.2556454335910034, | |
| "learning_rate": 3.0950571865151537e-06, | |
| "loss": 0.0966, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.7002655816514025, | |
| "learning_rate": 3.0853727415927882e-06, | |
| "loss": 0.1304, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 6.9645997532026, | |
| "learning_rate": 3.075696706371484e-06, | |
| "loss": 0.1041, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 2.2613558023242604, | |
| "learning_rate": 3.066029123351718e-06, | |
| "loss": 0.1066, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.6857551895264742, | |
| "learning_rate": 3.056370034996835e-06, | |
| "loss": 0.1034, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.6661348639167128, | |
| "learning_rate": 3.0467194837328716e-06, | |
| "loss": 0.1331, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 3.1575535840104454, | |
| "learning_rate": 3.0370775119483694e-06, | |
| "loss": 0.0963, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.649294756841945, | |
| "learning_rate": 3.0274441619941787e-06, | |
| "loss": 0.1244, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.6157138126800377, | |
| "learning_rate": 3.017819476183287e-06, | |
| "loss": 0.1198, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.6710125799816895, | |
| "learning_rate": 3.0082034967906204e-06, | |
| "loss": 0.0964, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.5863459928289523, | |
| "learning_rate": 2.998596266052866e-06, | |
| "loss": 0.1032, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.8074140571890533, | |
| "learning_rate": 2.9889978261682873e-06, | |
| "loss": 0.0888, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.4876864670226553, | |
| "learning_rate": 2.9794082192965272e-06, | |
| "loss": 0.1023, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.7127542347734488, | |
| "learning_rate": 2.9698274875584393e-06, | |
| "loss": 0.1246, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.5660028087829367, | |
| "learning_rate": 2.9602556730358865e-06, | |
| "loss": 0.0858, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.4787554897006587, | |
| "learning_rate": 2.950692817771571e-06, | |
| "loss": 0.1247, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.004753064742886, | |
| "learning_rate": 2.941138963768842e-06, | |
| "loss": 0.1096, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.171685656988102, | |
| "learning_rate": 2.931594152991506e-06, | |
| "loss": 0.1273, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.0310858251944377, | |
| "learning_rate": 2.9220584273636567e-06, | |
| "loss": 0.1156, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.4705758430342852, | |
| "learning_rate": 2.9125318287694777e-06, | |
| "loss": 0.1159, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.5899197031650942, | |
| "learning_rate": 2.903014399053063e-06, | |
| "loss": 0.1144, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.0576756317048566, | |
| "learning_rate": 2.8935061800182395e-06, | |
| "loss": 0.1606, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.8486006145520921, | |
| "learning_rate": 2.884007213428369e-06, | |
| "loss": 0.0805, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 13.902534606824819, | |
| "learning_rate": 2.8745175410061822e-06, | |
| "loss": 0.0827, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 2.6397252956915422, | |
| "learning_rate": 2.8650372044335783e-06, | |
| "loss": 0.09, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.69662477582431, | |
| "learning_rate": 2.8555662453514565e-06, | |
| "loss": 0.105, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.5278564436727615, | |
| "learning_rate": 2.846104705359527e-06, | |
| "loss": 0.0943, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.6709412297375785, | |
| "learning_rate": 2.836652626016121e-06, | |
| "loss": 0.1357, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.3531841084884235, | |
| "learning_rate": 2.8272100488380237e-06, | |
| "loss": 0.0642, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.5587241550432935, | |
| "learning_rate": 2.817777015300278e-06, | |
| "loss": 0.0952, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.258828525128708, | |
| "learning_rate": 2.80835356683601e-06, | |
| "loss": 0.1698, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.8775581455111865, | |
| "learning_rate": 2.798939744836247e-06, | |
| "loss": 0.1173, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.9208509554836495, | |
| "learning_rate": 2.789535590649728e-06, | |
| "loss": 0.1164, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.0503080097332997, | |
| "learning_rate": 2.780141145582731e-06, | |
| "loss": 0.1087, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 3.344681587427957, | |
| "learning_rate": 2.7707564508988917e-06, | |
| "loss": 0.1493, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.7103934451645413, | |
| "learning_rate": 2.761381547819011e-06, | |
| "loss": 0.1042, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.635862007069694, | |
| "learning_rate": 2.7520164775208867e-06, | |
| "loss": 0.1064, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 2.136653527750661, | |
| "learning_rate": 2.7426612811391295e-06, | |
| "loss": 0.1268, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.565949660217831, | |
| "learning_rate": 2.733315999764974e-06, | |
| "loss": 0.1289, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 3.561447396904177, | |
| "learning_rate": 2.7239806744461095e-06, | |
| "loss": 0.1026, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.6443642185497302, | |
| "learning_rate": 2.7146553461864943e-06, | |
| "loss": 0.1069, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.537615647007176, | |
| "learning_rate": 2.705340055946177e-06, | |
| "loss": 0.1082, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.5106720702647543, | |
| "learning_rate": 2.6960348446411123e-06, | |
| "loss": 0.1153, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.635526999095943, | |
| "learning_rate": 2.6867397531429895e-06, | |
| "loss": 0.1184, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.9105341445360475, | |
| "learning_rate": 2.6774548222790415e-06, | |
| "loss": 0.1865, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.7420481882111878, | |
| "learning_rate": 2.6681800928318778e-06, | |
| "loss": 0.1478, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.512461760578711, | |
| "learning_rate": 2.6589156055393023e-06, | |
| "loss": 0.0799, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.250094678795374, | |
| "learning_rate": 2.6496614010941217e-06, | |
| "loss": 0.1326, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.883689440298225, | |
| "learning_rate": 2.6404175201439886e-06, | |
| "loss": 0.1234, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.9270058022648389, | |
| "learning_rate": 2.6311840032912006e-06, | |
| "loss": 0.1019, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.4081318296335605, | |
| "learning_rate": 2.62196089109254e-06, | |
| "loss": 0.0826, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 7.6728184484530475, | |
| "learning_rate": 2.6127482240590873e-06, | |
| "loss": 0.1226, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.3907092679086035, | |
| "learning_rate": 2.6035460426560407e-06, | |
| "loss": 0.0727, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.280540245854714, | |
| "learning_rate": 2.5943543873025465e-06, | |
| "loss": 0.1119, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.1251362917376775, | |
| "learning_rate": 2.585173298371511e-06, | |
| "loss": 0.123, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.6712555338356587, | |
| "learning_rate": 2.5760028161894356e-06, | |
| "loss": 0.1194, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.492680856779564, | |
| "learning_rate": 2.566842981036231e-06, | |
| "loss": 0.0897, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 2.0679531653024457, | |
| "learning_rate": 2.5576938331450384e-06, | |
| "loss": 0.1405, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.3844550472485293, | |
| "learning_rate": 2.548555412702065e-06, | |
| "loss": 0.1385, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.8043797801884316, | |
| "learning_rate": 2.5394277598463903e-06, | |
| "loss": 0.1191, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.8439167974623585, | |
| "learning_rate": 2.5303109146698046e-06, | |
| "loss": 0.1727, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.5465929151314142, | |
| "learning_rate": 2.5212049172166287e-06, | |
| "loss": 0.1213, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.8983516783979946, | |
| "learning_rate": 2.512109807483528e-06, | |
| "loss": 0.1183, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.9065223453576003, | |
| "learning_rate": 2.5030256254193576e-06, | |
| "loss": 0.1327, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 2.0558919653887355, | |
| "learning_rate": 2.493952410924961e-06, | |
| "loss": 0.1207, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.4722241516028056, | |
| "learning_rate": 2.4848902038530184e-06, | |
| "loss": 0.0975, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.7425038137771816, | |
| "learning_rate": 2.47583904400786e-06, | |
| "loss": 0.1156, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.7363383932679017, | |
| "learning_rate": 2.4667989711452878e-06, | |
| "loss": 0.1093, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.7124149459581317, | |
| "learning_rate": 2.457770024972412e-06, | |
| "loss": 0.095, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 3.3387961439928358, | |
| "learning_rate": 2.4487522451474655e-06, | |
| "loss": 0.1239, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.6878226288878275, | |
| "learning_rate": 2.4397456712796385e-06, | |
| "loss": 0.098, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.5795072509935049, | |
| "learning_rate": 2.430750342928901e-06, | |
| "loss": 0.1133, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.8746300617437088, | |
| "learning_rate": 2.4217662996058226e-06, | |
| "loss": 0.1556, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 2.5504162897648324, | |
| "learning_rate": 2.4127935807714154e-06, | |
| "loss": 0.1364, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.5859801310552404, | |
| "learning_rate": 2.40383222583694e-06, | |
| "loss": 0.1184, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.635516927638879, | |
| "learning_rate": 2.39488227416375e-06, | |
| "loss": 0.0893, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 2.1535635990621302, | |
| "learning_rate": 2.3859437650631105e-06, | |
| "loss": 0.102, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 2.792425838316303, | |
| "learning_rate": 2.377016737796024e-06, | |
| "loss": 0.0752, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 2.2376840524407164, | |
| "learning_rate": 2.368101231573066e-06, | |
| "loss": 0.0874, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.8324530457880948, | |
| "learning_rate": 2.3591972855541993e-06, | |
| "loss": 0.1181, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.8822020144429574, | |
| "learning_rate": 2.3503049388486187e-06, | |
| "loss": 0.1219, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 2.2007033775790816, | |
| "learning_rate": 2.34142423051457e-06, | |
| "loss": 0.119, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 16.503760887068537, | |
| "learning_rate": 2.3325551995591723e-06, | |
| "loss": 0.1213, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.7499648921099262, | |
| "learning_rate": 2.3236978849382625e-06, | |
| "loss": 0.0915, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 2.1132803759697496, | |
| "learning_rate": 2.3148523255562065e-06, | |
| "loss": 0.1555, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.3009606425237226, | |
| "learning_rate": 2.3060185602657443e-06, | |
| "loss": 0.0671, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 3.0363667534903103, | |
| "learning_rate": 2.2971966278678115e-06, | |
| "loss": 0.1471, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 2.2792526851516404, | |
| "learning_rate": 2.2883865671113637e-06, | |
| "loss": 0.1264, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 2.2635338410250414, | |
| "learning_rate": 2.279588416693218e-06, | |
| "loss": 0.0808, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.827366143443778, | |
| "learning_rate": 2.2708022152578775e-06, | |
| "loss": 0.1169, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.7243659089253423, | |
| "learning_rate": 2.262028001397355e-06, | |
| "loss": 0.0842, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.881634138113363, | |
| "learning_rate": 2.2532658136510165e-06, | |
| "loss": 0.1045, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.103741160610525, | |
| "learning_rate": 2.244515690505403e-06, | |
| "loss": 0.1071, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.0469803289981146, | |
| "learning_rate": 2.2357776703940613e-06, | |
| "loss": 0.134, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.7133679583049146, | |
| "learning_rate": 2.22705179169738e-06, | |
| "loss": 0.1282, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.1271738970026544, | |
| "learning_rate": 2.218338092742418e-06, | |
| "loss": 0.1637, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.6637756715555911, | |
| "learning_rate": 2.2096366118027385e-06, | |
| "loss": 0.0798, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.6078482696889287, | |
| "learning_rate": 2.200947387098232e-06, | |
| "loss": 0.1207, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.9300362876253738, | |
| "learning_rate": 2.1922704567949643e-06, | |
| "loss": 0.1139, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.4856625951255353, | |
| "learning_rate": 2.183605859004997e-06, | |
| "loss": 0.0949, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.5176974696935166, | |
| "learning_rate": 2.174953631786217e-06, | |
| "loss": 0.09, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.042197288042245, | |
| "learning_rate": 2.166313813142185e-06, | |
| "loss": 0.158, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 2.0160094322338598, | |
| "learning_rate": 2.1576864410219517e-06, | |
| "loss": 0.1175, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.7635899225729912, | |
| "learning_rate": 2.1490715533199014e-06, | |
| "loss": 0.0939, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.9225389063482508, | |
| "learning_rate": 2.1404691878755845e-06, | |
| "loss": 0.1377, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 3.830475012337844, | |
| "learning_rate": 2.131879382473544e-06, | |
| "loss": 0.0901, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.5548356207069536, | |
| "learning_rate": 2.123302174843161e-06, | |
| "loss": 0.107, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.7800121351929723, | |
| "learning_rate": 2.114737602658476e-06, | |
| "loss": 0.0943, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.8985974516513844, | |
| "learning_rate": 2.1061857035380364e-06, | |
| "loss": 0.1053, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.6179775706907018, | |
| "learning_rate": 2.097646515044724e-06, | |
| "loss": 0.0894, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.9563531354116217, | |
| "learning_rate": 2.089120074685587e-06, | |
| "loss": 0.1232, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 2.347504434239575, | |
| "learning_rate": 2.080606419911686e-06, | |
| "loss": 0.1309, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 2.3869140849131103, | |
| "learning_rate": 2.0721055881179154e-06, | |
| "loss": 0.1382, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.6530719214025684, | |
| "learning_rate": 2.063617616642852e-06, | |
| "loss": 0.1091, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 2.8463484297999817, | |
| "learning_rate": 2.0551425427685862e-06, | |
| "loss": 0.0755, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.4804477467947055, | |
| "learning_rate": 2.0466804037205527e-06, | |
| "loss": 0.1261, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.9933059234486568, | |
| "learning_rate": 2.0382312366673777e-06, | |
| "loss": 0.1438, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.4558824986540313, | |
| "learning_rate": 2.029795078720705e-06, | |
| "loss": 0.0801, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.7568644774027609, | |
| "learning_rate": 2.0213719669350414e-06, | |
| "loss": 0.107, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.610845221208439, | |
| "learning_rate": 2.01296193830759e-06, | |
| "loss": 0.1176, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.8259622221603906, | |
| "learning_rate": 2.004565029778085e-06, | |
| "loss": 0.1005, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.5869296681714593, | |
| "learning_rate": 1.9961812782286384e-06, | |
| "loss": 0.1133, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.099922313185119, | |
| "learning_rate": 1.987810720483566e-06, | |
| "loss": 0.0658, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.9629552453343817, | |
| "learning_rate": 1.979453393309235e-06, | |
| "loss": 0.1434, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.050001164612842, | |
| "learning_rate": 1.9711093334139013e-06, | |
| "loss": 0.1201, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.8072014706090043, | |
| "learning_rate": 1.9627785774475415e-06, | |
| "loss": 0.1543, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.6558315512780304, | |
| "learning_rate": 1.954461162001703e-06, | |
| "loss": 0.0891, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.823622763564889, | |
| "learning_rate": 1.946157123609329e-06, | |
| "loss": 0.105, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.601904934784887, | |
| "learning_rate": 1.9378664987446135e-06, | |
| "loss": 0.1156, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.229821956730354, | |
| "learning_rate": 1.9295893238228316e-06, | |
| "loss": 0.1198, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.893246059611001, | |
| "learning_rate": 1.9213256352001777e-06, | |
| "loss": 0.1559, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.8106645848464975, | |
| "learning_rate": 1.9130754691736157e-06, | |
| "loss": 0.1609, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.4879614809305632, | |
| "learning_rate": 1.9048388619807085e-06, | |
| "loss": 0.0842, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.7694300917166506, | |
| "learning_rate": 1.896615849799467e-06, | |
| "loss": 0.0707, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.3353327364662788, | |
| "learning_rate": 1.8884064687481897e-06, | |
| "loss": 0.072, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.6306354819802187, | |
| "learning_rate": 1.880210754885296e-06, | |
| "loss": 0.0898, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.7707297323164894, | |
| "learning_rate": 1.8720287442091823e-06, | |
| "loss": 0.1469, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 2.6693648130916285, | |
| "learning_rate": 1.8638604726580479e-06, | |
| "loss": 0.1215, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.777331267067197, | |
| "learning_rate": 1.8557059761097517e-06, | |
| "loss": 0.1044, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 5.2001988352604105, | |
| "learning_rate": 1.8475652903816476e-06, | |
| "loss": 0.1289, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.7577976787597611, | |
| "learning_rate": 1.8394384512304215e-06, | |
| "loss": 0.1365, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.8901050885585722, | |
| "learning_rate": 1.8313254943519482e-06, | |
| "loss": 0.1306, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.854742043632422, | |
| "learning_rate": 1.8232264553811196e-06, | |
| "loss": 0.1261, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 3.698941017540202, | |
| "learning_rate": 1.8151413698917004e-06, | |
| "loss": 0.1625, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.7206558278103523, | |
| "learning_rate": 1.8070702733961676e-06, | |
| "loss": 0.0938, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.7677875559598875, | |
| "learning_rate": 1.7990132013455475e-06, | |
| "loss": 0.1314, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.6453012236522748, | |
| "learning_rate": 1.7909701891292719e-06, | |
| "loss": 0.1049, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.8889911261871926, | |
| "learning_rate": 1.7829412720750172e-06, | |
| "loss": 0.1181, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.8214020256704653, | |
| "learning_rate": 1.774926485448543e-06, | |
| "loss": 0.1221, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.667776340671231, | |
| "learning_rate": 1.7669258644535497e-06, | |
| "loss": 0.1152, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.6599634490047435, | |
| "learning_rate": 1.7589394442315144e-06, | |
| "loss": 0.0937, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.99996195686876, | |
| "learning_rate": 1.7509672598615417e-06, | |
| "loss": 0.1419, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 2.2118355594063708, | |
| "learning_rate": 1.7430093463602021e-06, | |
| "loss": 0.1449, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.8327808051263679, | |
| "learning_rate": 1.7350657386813897e-06, | |
| "loss": 0.1156, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.6042442726020136, | |
| "learning_rate": 1.7271364717161609e-06, | |
| "loss": 0.1496, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.998649435249516, | |
| "learning_rate": 1.7192215802925793e-06, | |
| "loss": 0.1067, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.6141034792802256, | |
| "learning_rate": 1.7113210991755713e-06, | |
| "loss": 0.1178, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.9428946129686242, | |
| "learning_rate": 1.7034350630667628e-06, | |
| "loss": 0.1401, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.6832521348575313, | |
| "learning_rate": 1.6955635066043363e-06, | |
| "loss": 0.1273, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 4.038854894966858, | |
| "learning_rate": 1.6877064643628766e-06, | |
| "loss": 0.1486, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.7326863265701336, | |
| "learning_rate": 1.6798639708532088e-06, | |
| "loss": 0.1034, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.373357985004174, | |
| "learning_rate": 1.6720360605222653e-06, | |
| "loss": 0.0887, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 2.0943429087659404, | |
| "learning_rate": 1.6642227677529149e-06, | |
| "loss": 0.1079, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.6520799301347306, | |
| "learning_rate": 1.656424126863827e-06, | |
| "loss": 0.1097, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.9036469573563441, | |
| "learning_rate": 1.6486401721093143e-06, | |
| "loss": 0.1107, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.5580736483345345, | |
| "learning_rate": 1.6408709376791798e-06, | |
| "loss": 0.0925, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 2.118080025975157, | |
| "learning_rate": 1.6331164576985737e-06, | |
| "loss": 0.0992, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 2.7113308881162417, | |
| "learning_rate": 1.6253767662278346e-06, | |
| "loss": 0.1479, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.6408114347988518, | |
| "learning_rate": 1.6176518972623505e-06, | |
| "loss": 0.0807, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.632998910503062, | |
| "learning_rate": 1.6099418847324022e-06, | |
| "loss": 0.1413, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.810289738754614, | |
| "learning_rate": 1.6022467625030115e-06, | |
| "loss": 0.1218, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.762358379505647, | |
| "learning_rate": 1.5945665643738039e-06, | |
| "loss": 0.1393, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.5628602324627865, | |
| "learning_rate": 1.5869013240788466e-06, | |
| "loss": 0.0907, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.95799804387182, | |
| "learning_rate": 1.5792510752865114e-06, | |
| "loss": 0.1213, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.6700313560006872, | |
| "learning_rate": 1.5716158515993223e-06, | |
| "loss": 0.134, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.019046183613624, | |
| "learning_rate": 1.563995686553803e-06, | |
| "loss": 0.1817, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.7629259020298114, | |
| "learning_rate": 1.556390613620341e-06, | |
| "loss": 0.1636, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.9536751841576872, | |
| "learning_rate": 1.548800666203028e-06, | |
| "loss": 0.1116, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.0814066954978614, | |
| "learning_rate": 1.5412258776395233e-06, | |
| "loss": 0.143, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.180706325778583, | |
| "learning_rate": 1.5336662812009035e-06, | |
| "loss": 0.0748, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.8236403642886168, | |
| "learning_rate": 1.5261219100915115e-06, | |
| "loss": 0.1335, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.5661440703265628, | |
| "learning_rate": 1.5185927974488224e-06, | |
| "loss": 0.1373, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.6203179411585926, | |
| "learning_rate": 1.5110789763432832e-06, | |
| "loss": 0.1257, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.531392583201998, | |
| "learning_rate": 1.5035804797781811e-06, | |
| "loss": 0.1374, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 2.3189072111660938, | |
| "learning_rate": 1.4960973406894919e-06, | |
| "loss": 0.0834, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 2.0438901557258724, | |
| "learning_rate": 1.4886295919457317e-06, | |
| "loss": 0.138, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.7428867207404393, | |
| "learning_rate": 1.481177266347823e-06, | |
| "loss": 0.114, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.8447265154099988, | |
| "learning_rate": 1.4737403966289387e-06, | |
| "loss": 0.1253, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 2.6780724286477957, | |
| "learning_rate": 1.4663190154543683e-06, | |
| "loss": 0.1107, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 2.2162692268576656, | |
| "learning_rate": 1.4589131554213704e-06, | |
| "loss": 0.1187, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 2.1732299790030174, | |
| "learning_rate": 1.451522849059025e-06, | |
| "loss": 0.0794, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.5098062263871141, | |
| "learning_rate": 1.444148128828101e-06, | |
| "loss": 0.0916, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.433136044460252, | |
| "learning_rate": 1.4367890271209024e-06, | |
| "loss": 0.0902, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.5675088273366966, | |
| "learning_rate": 1.4294455762611343e-06, | |
| "loss": 0.1293, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.6721476839079745, | |
| "learning_rate": 1.422117808503759e-06, | |
| "loss": 0.1283, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.4516772793399861, | |
| "learning_rate": 1.4148057560348477e-06, | |
| "loss": 0.0961, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.9838232631858521, | |
| "learning_rate": 1.4075094509714526e-06, | |
| "loss": 0.115, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.9665812232773578, | |
| "learning_rate": 1.4002289253614492e-06, | |
| "loss": 0.1168, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.6688597299685144, | |
| "learning_rate": 1.3929642111834114e-06, | |
| "loss": 0.1377, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.5727794307304164, | |
| "learning_rate": 1.3857153403464613e-06, | |
| "loss": 0.1307, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.4649402982348885, | |
| "learning_rate": 1.3784823446901295e-06, | |
| "loss": 0.1069, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.72399909687252, | |
| "learning_rate": 1.3712652559842205e-06, | |
| "loss": 0.1293, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.242003952713536, | |
| "learning_rate": 1.364064105928668e-06, | |
| "loss": 0.114, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.705374708960644, | |
| "learning_rate": 1.356878926153401e-06, | |
| "loss": 0.113, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.6517476509676714, | |
| "learning_rate": 1.3497097482181948e-06, | |
| "loss": 0.0865, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.651682909854867, | |
| "learning_rate": 1.342556603612546e-06, | |
| "loss": 0.091, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.616992396188749, | |
| "learning_rate": 1.3354195237555245e-06, | |
| "loss": 0.1177, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 3.973440183689998, | |
| "learning_rate": 1.3282985399956372e-06, | |
| "loss": 0.0745, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.2399378592144723, | |
| "learning_rate": 1.3211936836106926e-06, | |
| "loss": 0.0853, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.5356433036883095, | |
| "learning_rate": 1.3141049858076648e-06, | |
| "loss": 0.1219, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.1533362310986135, | |
| "learning_rate": 1.3070324777225474e-06, | |
| "loss": 0.1396, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.8636701257772759, | |
| "learning_rate": 1.29997619042023e-06, | |
| "loss": 0.0767, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.219301762559893, | |
| "learning_rate": 1.2929361548943503e-06, | |
| "loss": 0.1646, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.4052352119926677, | |
| "learning_rate": 1.2859124020671643e-06, | |
| "loss": 0.1196, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.203155952648259, | |
| "learning_rate": 1.2789049627894111e-06, | |
| "loss": 0.1355, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.079010510241825, | |
| "learning_rate": 1.2719138678401693e-06, | |
| "loss": 0.0916, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.4166843086588452, | |
| "learning_rate": 1.2649391479267347e-06, | |
| "loss": 0.1051, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 3.4052281200010173, | |
| "learning_rate": 1.2579808336844711e-06, | |
| "loss": 0.1011, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.1195754551679133, | |
| "learning_rate": 1.2510389556766884e-06, | |
| "loss": 0.1152, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.6404708356664435, | |
| "learning_rate": 1.2441135443945023e-06, | |
| "loss": 0.106, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.8634718218136286, | |
| "learning_rate": 1.237204630256697e-06, | |
| "loss": 0.084, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.8180176669915014, | |
| "learning_rate": 1.2303122436096015e-06, | |
| "loss": 0.1217, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.74789232737528, | |
| "learning_rate": 1.2234364147269433e-06, | |
| "loss": 0.1016, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.1100851203934625, | |
| "learning_rate": 1.2165771738097288e-06, | |
| "loss": 0.1056, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.137840760178262, | |
| "learning_rate": 1.209734550986103e-06, | |
| "loss": 0.0817, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.016888381508961, | |
| "learning_rate": 1.2029085763112142e-06, | |
| "loss": 0.1414, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.044827160753601, | |
| "learning_rate": 1.1960992797670935e-06, | |
| "loss": 0.0727, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.2044422944254878, | |
| "learning_rate": 1.1893066912625078e-06, | |
| "loss": 0.0571, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 2.001239217217173, | |
| "learning_rate": 1.1825308406328435e-06, | |
| "loss": 0.1054, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.898027796170711, | |
| "learning_rate": 1.1757717576399658e-06, | |
| "loss": 0.085, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.866494564536938, | |
| "learning_rate": 1.1690294719720891e-06, | |
| "loss": 0.1069, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.826616801921012, | |
| "learning_rate": 1.1623040132436515e-06, | |
| "loss": 0.1405, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 2.8994861648399652, | |
| "learning_rate": 1.1555954109951772e-06, | |
| "loss": 0.1468, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.7399303106591963, | |
| "learning_rate": 1.1489036946931548e-06, | |
| "loss": 0.1103, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.5618058484821695, | |
| "learning_rate": 1.1422288937299037e-06, | |
| "loss": 0.1147, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.2516405928444252, | |
| "learning_rate": 1.1355710374234414e-06, | |
| "loss": 0.0751, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 2.324978806825241, | |
| "learning_rate": 1.1289301550173643e-06, | |
| "loss": 0.1301, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.9352463582352895, | |
| "learning_rate": 1.122306275680708e-06, | |
| "loss": 0.1336, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.5812187303605587, | |
| "learning_rate": 1.1156994285078287e-06, | |
| "loss": 0.1127, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 2.7723759082237054, | |
| "learning_rate": 1.1091096425182719e-06, | |
| "loss": 0.076, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.5841351312799044, | |
| "learning_rate": 1.1025369466566394e-06, | |
| "loss": 0.112, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.529260110516267, | |
| "learning_rate": 1.0959813697924743e-06, | |
| "loss": 0.1328, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.763156092795216, | |
| "learning_rate": 1.0894429407201207e-06, | |
| "loss": 0.115, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.5513777752156226, | |
| "learning_rate": 1.082921688158608e-06, | |
| "loss": 0.0888, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.8994960689856308, | |
| "learning_rate": 1.0764176407515203e-06, | |
| "loss": 0.1229, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.4517647924703174, | |
| "learning_rate": 1.0699308270668667e-06, | |
| "loss": 0.0829, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.6328827890997333, | |
| "learning_rate": 1.0634612755969665e-06, | |
| "loss": 0.0929, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.0925694894155447, | |
| "learning_rate": 1.0570090147583089e-06, | |
| "loss": 0.1319, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.5999398316628513, | |
| "learning_rate": 1.050574072891445e-06, | |
| "loss": 0.099, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.0689198706320813, | |
| "learning_rate": 1.044156478260851e-06, | |
| "loss": 0.1131, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.4542505060532096, | |
| "learning_rate": 1.0377562590548067e-06, | |
| "loss": 0.1119, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.1296698994205494, | |
| "learning_rate": 1.031373443385278e-06, | |
| "loss": 0.1424, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.849387025827355, | |
| "learning_rate": 1.025008059287782e-06, | |
| "loss": 0.1279, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.111837408928748, | |
| "learning_rate": 1.0186601347212756e-06, | |
| "loss": 0.1384, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.9271949909614214, | |
| "learning_rate": 1.0123296975680263e-06, | |
| "loss": 0.1087, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.329230313694751, | |
| "learning_rate": 1.0060167756334876e-06, | |
| "loss": 0.1289, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.7572601092475966, | |
| "learning_rate": 9.997213966461843e-07, | |
| "loss": 0.1203, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 2.1928031187601045, | |
| "learning_rate": 9.934435882575849e-07, | |
| "loss": 0.1436, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.89964134222432, | |
| "learning_rate": 9.871833780419827e-07, | |
| "loss": 0.1532, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.6273782226263336, | |
| "learning_rate": 9.809407934963705e-07, | |
| "loss": 0.1361, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.616377832330268, | |
| "learning_rate": 9.747158620403274e-07, | |
| "loss": 0.0965, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.7615100331467808, | |
| "learning_rate": 9.685086110158926e-07, | |
| "loss": 0.1186, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.1874223303571698, | |
| "learning_rate": 9.623190676874438e-07, | |
| "loss": 0.0801, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.6707289299229628, | |
| "learning_rate": 9.561472592415849e-07, | |
| "loss": 0.1015, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.703153506821708, | |
| "learning_rate": 9.499932127870209e-07, | |
| "loss": 0.1144, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.8021171145691988, | |
| "learning_rate": 9.438569553544369e-07, | |
| "loss": 0.1236, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.5572935030368558, | |
| "learning_rate": 9.377385138963868e-07, | |
| "loss": 0.0957, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.5329776444898073, | |
| "learning_rate": 9.316379152871668e-07, | |
| "loss": 0.118, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.8036771947979402, | |
| "learning_rate": 9.255551863227041e-07, | |
| "loss": 0.1209, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.3860222366876078, | |
| "learning_rate": 9.194903537204363e-07, | |
| "loss": 0.1009, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 4.511454379513255, | |
| "learning_rate": 9.134434441191908e-07, | |
| "loss": 0.1137, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.4241637994362393, | |
| "learning_rate": 9.074144840790755e-07, | |
| "loss": 0.097, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.5641033428478222, | |
| "learning_rate": 9.014035000813531e-07, | |
| "loss": 0.0706, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 2.0497581640328453, | |
| "learning_rate": 8.954105185283324e-07, | |
| "loss": 0.1037, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 3.3381709134091255, | |
| "learning_rate": 8.894355657432497e-07, | |
| "loss": 0.0807, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.9154332559676057, | |
| "learning_rate": 8.83478667970149e-07, | |
| "loss": 0.103, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.5683413366039711, | |
| "learning_rate": 8.775398513737754e-07, | |
| "loss": 0.0868, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.5479216699244795, | |
| "learning_rate": 8.71619142039451e-07, | |
| "loss": 0.094, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.1517933989870968, | |
| "learning_rate": 8.657165659729671e-07, | |
| "loss": 0.1104, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.801198378747815, | |
| "learning_rate": 8.59832149100468e-07, | |
| "loss": 0.1238, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.573203527001529, | |
| "learning_rate": 8.539659172683329e-07, | |
| "loss": 0.0835, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.0257387197632575, | |
| "learning_rate": 8.481178962430713e-07, | |
| "loss": 0.1047, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.7341343767324775, | |
| "learning_rate": 8.422881117111987e-07, | |
| "loss": 0.0955, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.084835594489109, | |
| "learning_rate": 8.364765892791349e-07, | |
| "loss": 0.1051, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.449522355959983, | |
| "learning_rate": 8.306833544730841e-07, | |
| "loss": 0.0871, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.5784050635904436, | |
| "learning_rate": 8.249084327389234e-07, | |
| "loss": 0.0918, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.6653894058429495, | |
| "learning_rate": 8.191518494420953e-07, | |
| "loss": 0.1005, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.1644391236635108, | |
| "learning_rate": 8.134136298674933e-07, | |
| "loss": 0.1258, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.7465947018602068, | |
| "learning_rate": 8.076937992193478e-07, | |
| "loss": 0.0958, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.7951316250275726, | |
| "learning_rate": 8.01992382621124e-07, | |
| "loss": 0.1203, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.8552742016937045, | |
| "learning_rate": 7.963094051154014e-07, | |
| "loss": 0.0919, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.5275354336077738, | |
| "learning_rate": 7.906448916637705e-07, | |
| "loss": 0.0855, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 2.3276563751497337, | |
| "learning_rate": 7.84998867146724e-07, | |
| "loss": 0.0906, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 2.1777455379281667, | |
| "learning_rate": 7.793713563635396e-07, | |
| "loss": 0.1345, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 2.6139708215357214, | |
| "learning_rate": 7.737623840321811e-07, | |
| "loss": 0.1427, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.696636849936864, | |
| "learning_rate": 7.681719747891813e-07, | |
| "loss": 0.1131, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 3.0304350301117027, | |
| "learning_rate": 7.6260015318954e-07, | |
| "loss": 0.0966, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.3796777684299342, | |
| "learning_rate": 7.570469437066147e-07, | |
| "loss": 0.1111, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.9772318815498195, | |
| "learning_rate": 7.515123707320071e-07, | |
| "loss": 0.0926, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.6538203453203246, | |
| "learning_rate": 7.45996458575467e-07, | |
| "loss": 0.1158, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.2605541600335792, | |
| "learning_rate": 7.404992314647746e-07, | |
| "loss": 0.0995, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.5321706969606996, | |
| "learning_rate": 7.350207135456416e-07, | |
| "loss": 0.1232, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 2.138546185384863, | |
| "learning_rate": 7.295609288816041e-07, | |
| "loss": 0.0928, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.9656824274684142, | |
| "learning_rate": 7.241199014539097e-07, | |
| "loss": 0.1355, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.7290513121096527, | |
| "learning_rate": 7.186976551614233e-07, | |
| "loss": 0.1299, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.9548477721792044, | |
| "learning_rate": 7.132942138205135e-07, | |
| "loss": 0.1151, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.6401086744005244, | |
| "learning_rate": 7.079096011649522e-07, | |
| "loss": 0.1259, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.8189732666722285, | |
| "learning_rate": 7.025438408458107e-07, | |
| "loss": 0.0989, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.6405758936091988, | |
| "learning_rate": 6.971969564313507e-07, | |
| "loss": 0.1013, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 2.1986403624499253, | |
| "learning_rate": 6.918689714069282e-07, | |
| "loss": 0.119, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.6237565653249362, | |
| "learning_rate": 6.865599091748826e-07, | |
| "loss": 0.1073, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.7584973484983841, | |
| "learning_rate": 6.81269793054442e-07, | |
| "loss": 0.1034, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.6483916129764982, | |
| "learning_rate": 6.759986462816142e-07, | |
| "loss": 0.1214, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.6732827265357355, | |
| "learning_rate": 6.707464920090895e-07, | |
| "loss": 0.1001, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.9849705649558909, | |
| "learning_rate": 6.65513353306132e-07, | |
| "loss": 0.1374, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.7522748414091973, | |
| "learning_rate": 6.602992531584873e-07, | |
| "loss": 0.1043, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 2.006451353252143, | |
| "learning_rate": 6.551042144682763e-07, | |
| "loss": 0.1329, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.8847396234732365, | |
| "learning_rate": 6.49928260053893e-07, | |
| "loss": 0.088, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.8113950911459717, | |
| "learning_rate": 6.447714126499088e-07, | |
| "loss": 0.1302, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 2.076649654393772, | |
| "learning_rate": 6.39633694906972e-07, | |
| "loss": 0.1177, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.5934392313053296, | |
| "learning_rate": 6.345151293917023e-07, | |
| "loss": 0.089, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.905616552986884, | |
| "learning_rate": 6.294157385866007e-07, | |
| "loss": 0.1389, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.605376887676223, | |
| "learning_rate": 6.243355448899447e-07, | |
| "loss": 0.1048, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.3039710961110615, | |
| "learning_rate": 6.192745706156894e-07, | |
| "loss": 0.0626, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 3.559131515931037, | |
| "learning_rate": 6.14232837993376e-07, | |
| "loss": 0.1308, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.2536324978892384, | |
| "learning_rate": 6.092103691680246e-07, | |
| "loss": 0.1463, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.6071289534363706, | |
| "learning_rate": 6.042071862000465e-07, | |
| "loss": 0.0981, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.6910196306241885, | |
| "learning_rate": 5.992233110651413e-07, | |
| "loss": 0.0904, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.905303264227101, | |
| "learning_rate": 5.942587656542004e-07, | |
| "loss": 0.1156, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.4674031858202814, | |
| "learning_rate": 5.893135717732157e-07, | |
| "loss": 0.1301, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.168788921263074, | |
| "learning_rate": 5.843877511431761e-07, | |
| "loss": 0.1029, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.6230153532575458, | |
| "learning_rate": 5.79481325399981e-07, | |
| "loss": 0.0932, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 3.9422581189127146, | |
| "learning_rate": 5.745943160943385e-07, | |
| "loss": 0.1223, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.318704383644754, | |
| "learning_rate": 5.697267446916727e-07, | |
| "loss": 0.11, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 2.520375714262893, | |
| "learning_rate": 5.648786325720313e-07, | |
| "loss": 0.0848, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.9331430191129737, | |
| "learning_rate": 5.600500010299881e-07, | |
| "loss": 0.0928, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.508072126931876, | |
| "learning_rate": 5.552408712745533e-07, | |
| "loss": 0.0905, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.815138714776867, | |
| "learning_rate": 5.504512644290788e-07, | |
| "loss": 0.1155, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.731977717124734, | |
| "learning_rate": 5.456812015311624e-07, | |
| "loss": 0.1131, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 2.0820938949916226, | |
| "learning_rate": 5.409307035325618e-07, | |
| "loss": 0.0907, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 4.8741872841013345, | |
| "learning_rate": 5.361997912990957e-07, | |
| "loss": 0.1042, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.8501234480047337, | |
| "learning_rate": 5.314884856105573e-07, | |
| "loss": 0.1003, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.4600368165176527, | |
| "learning_rate": 5.267968071606222e-07, | |
| "loss": 0.1102, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 2.0208377111093574, | |
| "learning_rate": 5.221247765567528e-07, | |
| "loss": 0.0971, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.7829392216950242, | |
| "learning_rate": 5.174724143201148e-07, | |
| "loss": 0.1078, | |
| "step": 1327 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.756667315342706, | |
| "learning_rate": 5.128397408854813e-07, | |
| "loss": 0.1201, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.7607102414398703, | |
| "learning_rate": 5.082267766011467e-07, | |
| "loss": 0.0615, | |
| "step": 1329 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.7279397201663833, | |
| "learning_rate": 5.036335417288374e-07, | |
| "loss": 0.1239, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 2.3223287090761953, | |
| "learning_rate": 4.990600564436177e-07, | |
| "loss": 0.0857, | |
| "step": 1331 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.915742325850939, | |
| "learning_rate": 4.945063408338085e-07, | |
| "loss": 0.0959, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.3073053408337982, | |
| "learning_rate": 4.899724149008933e-07, | |
| "loss": 0.0815, | |
| "step": 1333 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 2.1067359417427327, | |
| "learning_rate": 4.854582985594336e-07, | |
| "loss": 0.107, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.3702086647301606, | |
| "learning_rate": 4.809640116369807e-07, | |
| "loss": 0.0718, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.699005590583642, | |
| "learning_rate": 4.7648957387398663e-07, | |
| "loss": 0.0888, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.8006153872330963, | |
| "learning_rate": 4.72035004923721e-07, | |
| "loss": 0.1079, | |
| "step": 1337 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.5752108585132742, | |
| "learning_rate": 4.6760032435218094e-07, | |
| "loss": 0.1028, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.5523323756196192, | |
| "learning_rate": 4.6318555163800794e-07, | |
| "loss": 0.082, | |
| "step": 1339 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.581535153622859, | |
| "learning_rate": 4.587907061724034e-07, | |
| "loss": 0.1177, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 3.386615267018464, | |
| "learning_rate": 4.54415807259036e-07, | |
| "loss": 0.1077, | |
| "step": 1341 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.7675094550376917, | |
| "learning_rate": 4.500608741139689e-07, | |
| "loss": 0.1226, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 2.107225110659914, | |
| "learning_rate": 4.4572592586556274e-07, | |
| "loss": 0.1256, | |
| "step": 1343 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 4.534264455196776, | |
| "learning_rate": 4.41410981554401e-07, | |
| "loss": 0.078, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.4464350919004405, | |
| "learning_rate": 4.371160601332042e-07, | |
| "loss": 0.0875, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 4.171917408741793, | |
| "learning_rate": 4.328411804667415e-07, | |
| "loss": 0.0976, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.651495115764142, | |
| "learning_rate": 4.2858636133175537e-07, | |
| "loss": 0.0912, | |
| "step": 1347 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.411017646466787, | |
| "learning_rate": 4.2435162141687194e-07, | |
| "loss": 0.1018, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.6052992507261568, | |
| "learning_rate": 4.2013697932252627e-07, | |
| "loss": 0.1046, | |
| "step": 1349 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.7365312052470743, | |
| "learning_rate": 4.159424535608747e-07, | |
| "loss": 0.1125, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.6771179899356692, | |
| "learning_rate": 4.1176806255571746e-07, | |
| "loss": 0.0767, | |
| "step": 1351 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.9218646131425015, | |
| "learning_rate": 4.0761382464241275e-07, | |
| "loss": 0.1195, | |
| "step": 1352 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.079269658569359, | |
| "learning_rate": 4.0347975806780184e-07, | |
| "loss": 0.0975, | |
| "step": 1353 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.8693170141266313, | |
| "learning_rate": 3.99365880990128e-07, | |
| "loss": 0.1234, | |
| "step": 1354 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.761622569709359, | |
| "learning_rate": 3.9527221147895144e-07, | |
| "loss": 0.0927, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.4891796824023398, | |
| "learning_rate": 3.911987675150763e-07, | |
| "loss": 0.0761, | |
| "step": 1356 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.9547443620936422, | |
| "learning_rate": 3.8714556699047054e-07, | |
| "loss": 0.0985, | |
| "step": 1357 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.71508462863976, | |
| "learning_rate": 3.831126277081809e-07, | |
| "loss": 0.0972, | |
| "step": 1358 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.1865647338107532, | |
| "learning_rate": 3.7909996738226537e-07, | |
| "loss": 0.0926, | |
| "step": 1359 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.9869812144690924, | |
| "learning_rate": 3.7510760363770714e-07, | |
| "loss": 0.1033, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.785541452657883, | |
| "learning_rate": 3.711355540103395e-07, | |
| "loss": 0.1286, | |
| "step": 1361 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 2.0581948981674167, | |
| "learning_rate": 3.671838359467705e-07, | |
| "loss": 0.116, | |
| "step": 1362 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.6150202050168858, | |
| "learning_rate": 3.63252466804303e-07, | |
| "loss": 0.0981, | |
| "step": 1363 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.6139054545514815, | |
| "learning_rate": 3.59341463850863e-07, | |
| "loss": 0.0851, | |
| "step": 1364 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.9360777170557844, | |
| "learning_rate": 3.5545084426491963e-07, | |
| "loss": 0.1155, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.8506126432274976, | |
| "learning_rate": 3.5158062513541036e-07, | |
| "loss": 0.0763, | |
| "step": 1366 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.968890913075809, | |
| "learning_rate": 3.4773082346167e-07, | |
| "loss": 0.1267, | |
| "step": 1367 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.673222442493309, | |
| "learning_rate": 3.439014561533488e-07, | |
| "loss": 0.1033, | |
| "step": 1368 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.0098300835751193, | |
| "learning_rate": 3.400925400303451e-07, | |
| "loss": 0.1317, | |
| "step": 1369 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.8276152882241996, | |
| "learning_rate": 3.3630409182272896e-07, | |
| "loss": 0.0835, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.392229319969008, | |
| "learning_rate": 3.325361281706646e-07, | |
| "loss": 0.0786, | |
| "step": 1371 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.0259190953996344, | |
| "learning_rate": 3.2878866562434666e-07, | |
| "loss": 0.0957, | |
| "step": 1372 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.3994973339817043, | |
| "learning_rate": 3.2506172064391605e-07, | |
| "loss": 0.0706, | |
| "step": 1373 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.9425773637759582, | |
| "learning_rate": 3.2135530959939767e-07, | |
| "loss": 0.1076, | |
| "step": 1374 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.5964359732182867, | |
| "learning_rate": 3.1766944877062387e-07, | |
| "loss": 0.1041, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.6663126596565565, | |
| "learning_rate": 3.1400415434716e-07, | |
| "loss": 0.1679, | |
| "step": 1376 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.9689921232382654, | |
| "learning_rate": 3.1035944242824077e-07, | |
| "loss": 0.0927, | |
| "step": 1377 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.9496388529441202, | |
| "learning_rate": 3.0673532902269323e-07, | |
| "loss": 0.1552, | |
| "step": 1378 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.433647386785067, | |
| "learning_rate": 3.0313183004886894e-07, | |
| "loss": 0.0704, | |
| "step": 1379 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.7221556497886636, | |
| "learning_rate": 2.9954896133457536e-07, | |
| "loss": 0.0895, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.813340673979937, | |
| "learning_rate": 2.959867386170018e-07, | |
| "loss": 0.1305, | |
| "step": 1381 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.8887367666950123, | |
| "learning_rate": 2.9244517754265624e-07, | |
| "loss": 0.1116, | |
| "step": 1382 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 2.724113628132934, | |
| "learning_rate": 2.889242936672915e-07, | |
| "loss": 0.1058, | |
| "step": 1383 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.4971417466355235, | |
| "learning_rate": 2.8542410245583965e-07, | |
| "loss": 0.1513, | |
| "step": 1384 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.944450344851451, | |
| "learning_rate": 2.819446192823455e-07, | |
| "loss": 0.1277, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.107987311880698, | |
| "learning_rate": 2.784858594298928e-07, | |
| "loss": 0.1376, | |
| "step": 1386 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.9120369570817841, | |
| "learning_rate": 2.7504783809054524e-07, | |
| "loss": 0.103, | |
| "step": 1387 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.9866422051792438, | |
| "learning_rate": 2.7163057036527386e-07, | |
| "loss": 0.0783, | |
| "step": 1388 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.7235673574630885, | |
| "learning_rate": 2.682340712638931e-07, | |
| "loss": 0.1052, | |
| "step": 1389 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.5155437133847502, | |
| "learning_rate": 2.6485835570499496e-07, | |
| "loss": 0.1111, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.4465984926363034, | |
| "learning_rate": 2.615034385158821e-07, | |
| "loss": 0.1096, | |
| "step": 1391 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.9410060852090447, | |
| "learning_rate": 2.581693344325048e-07, | |
| "loss": 0.0983, | |
| "step": 1392 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.021116437716017, | |
| "learning_rate": 2.5485605809939253e-07, | |
| "loss": 0.1355, | |
| "step": 1393 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.18020026790015, | |
| "learning_rate": 2.5156362406959564e-07, | |
| "loss": 0.078, | |
| "step": 1394 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.0017179172037247, | |
| "learning_rate": 2.4829204680461626e-07, | |
| "loss": 0.0933, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 2.1818146072860545, | |
| "learning_rate": 2.450413406743446e-07, | |
| "loss": 0.1163, | |
| "step": 1396 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.5819291262050605, | |
| "learning_rate": 2.4181151995700146e-07, | |
| "loss": 0.1078, | |
| "step": 1397 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.736708968598104, | |
| "learning_rate": 2.386025988390678e-07, | |
| "loss": 0.1283, | |
| "step": 1398 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.9280851996846244, | |
| "learning_rate": 2.3541459141522894e-07, | |
| "loss": 0.1502, | |
| "step": 1399 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 2.035864827975166, | |
| "learning_rate": 2.322475116883105e-07, | |
| "loss": 0.1535, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 2.091516257516916, | |
| "learning_rate": 2.2910137356921325e-07, | |
| "loss": 0.1316, | |
| "step": 1401 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.7329390530290005, | |
| "learning_rate": 2.2597619087685874e-07, | |
| "loss": 0.1017, | |
| "step": 1402 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 2.648710415442816, | |
| "learning_rate": 2.2287197733812383e-07, | |
| "loss": 0.1234, | |
| "step": 1403 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.8801767205230242, | |
| "learning_rate": 2.197887465877796e-07, | |
| "loss": 0.106, | |
| "step": 1404 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.4917355080321695, | |
| "learning_rate": 2.1672651216843698e-07, | |
| "loss": 0.1018, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.5157369357883248, | |
| "learning_rate": 2.1368528753048234e-07, | |
| "loss": 0.0852, | |
| "step": 1406 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.5686151676879427, | |
| "learning_rate": 2.1066508603201862e-07, | |
| "loss": 0.08, | |
| "step": 1407 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.5800995132819375, | |
| "learning_rate": 2.0766592093880934e-07, | |
| "loss": 0.108, | |
| "step": 1408 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.8731292418109589, | |
| "learning_rate": 2.046878054242196e-07, | |
| "loss": 0.1165, | |
| "step": 1409 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.625115047806522, | |
| "learning_rate": 2.017307525691542e-07, | |
| "loss": 0.122, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.8551925821603361, | |
| "learning_rate": 1.9879477536200786e-07, | |
| "loss": 0.0919, | |
| "step": 1411 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.5373585296146006, | |
| "learning_rate": 1.9587988669860113e-07, | |
| "loss": 0.1327, | |
| "step": 1412 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.6368123757454525, | |
| "learning_rate": 1.9298609938212641e-07, | |
| "loss": 0.1024, | |
| "step": 1413 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.5102918071695266, | |
| "learning_rate": 1.9011342612309248e-07, | |
| "loss": 0.1014, | |
| "step": 1414 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.928399186896058, | |
| "learning_rate": 1.8726187953926833e-07, | |
| "loss": 0.1018, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.6015556491974345, | |
| "learning_rate": 1.8443147215562617e-07, | |
| "loss": 0.0999, | |
| "step": 1416 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.0479629218603264, | |
| "learning_rate": 1.8162221640428912e-07, | |
| "loss": 0.107, | |
| "step": 1417 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.336348686630352, | |
| "learning_rate": 1.7883412462447292e-07, | |
| "loss": 0.1599, | |
| "step": 1418 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 3.6875419627180652, | |
| "learning_rate": 1.760672090624349e-07, | |
| "loss": 0.1338, | |
| "step": 1419 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.3436701858234987, | |
| "learning_rate": 1.733214818714213e-07, | |
| "loss": 0.1008, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.0254065183126375, | |
| "learning_rate": 1.7059695511160712e-07, | |
| "loss": 0.1186, | |
| "step": 1421 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.7256603704001379, | |
| "learning_rate": 1.678936407500531e-07, | |
| "loss": 0.1033, | |
| "step": 1422 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.847803376270568, | |
| "learning_rate": 1.6521155066064276e-07, | |
| "loss": 0.0877, | |
| "step": 1423 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.5500932148117843, | |
| "learning_rate": 1.6255069662403865e-07, | |
| "loss": 0.0858, | |
| "step": 1424 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 2.18652420380819, | |
| "learning_rate": 1.599110903276274e-07, | |
| "loss": 0.1018, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.6021945415397538, | |
| "learning_rate": 1.5729274336546584e-07, | |
| "loss": 0.1168, | |
| "step": 1426 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 3.3055029012521335, | |
| "learning_rate": 1.5469566723823493e-07, | |
| "loss": 0.1058, | |
| "step": 1427 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 4.68487165673662, | |
| "learning_rate": 1.5211987335318424e-07, | |
| "loss": 0.1075, | |
| "step": 1428 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.9419405460987873, | |
| "learning_rate": 1.4956537302408703e-07, | |
| "loss": 0.0961, | |
| "step": 1429 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.6832498033387395, | |
| "learning_rate": 1.4703217747118748e-07, | |
| "loss": 0.1008, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.0905006447579084, | |
| "learning_rate": 1.4452029782114962e-07, | |
| "loss": 0.0761, | |
| "step": 1431 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.6638727887584686, | |
| "learning_rate": 1.4202974510701352e-07, | |
| "loss": 0.1254, | |
| "step": 1432 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.8394491514748785, | |
| "learning_rate": 1.3956053026814242e-07, | |
| "loss": 0.0913, | |
| "step": 1433 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.695184906190245, | |
| "learning_rate": 1.371126641501763e-07, | |
| "loss": 0.1401, | |
| "step": 1434 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.090555292874061, | |
| "learning_rate": 1.346861575049857e-07, | |
| "loss": 0.0738, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.4368996437936123, | |
| "learning_rate": 1.3228102099062055e-07, | |
| "loss": 0.098, | |
| "step": 1436 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.10070004440555, | |
| "learning_rate": 1.298972651712682e-07, | |
| "loss": 0.111, | |
| "step": 1437 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.7671896818019424, | |
| "learning_rate": 1.2753490051720273e-07, | |
| "loss": 0.1162, | |
| "step": 1438 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.8504396251495214, | |
| "learning_rate": 1.251939374047423e-07, | |
| "loss": 0.0936, | |
| "step": 1439 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.9012722618600095, | |
| "learning_rate": 1.2287438611620185e-07, | |
| "loss": 0.1283, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.843998247236543, | |
| "learning_rate": 1.2057625683984776e-07, | |
| "loss": 0.1705, | |
| "step": 1441 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.7252084874321323, | |
| "learning_rate": 1.1829955966985385e-07, | |
| "loss": 0.1007, | |
| "step": 1442 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.5529314859786916, | |
| "learning_rate": 1.1604430460625593e-07, | |
| "loss": 0.0603, | |
| "step": 1443 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.963948826434045, | |
| "learning_rate": 1.1381050155491013e-07, | |
| "loss": 0.1292, | |
| "step": 1444 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 2.8390891040629755, | |
| "learning_rate": 1.1159816032744742e-07, | |
| "loss": 0.1179, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 8.485864391698009, | |
| "learning_rate": 1.094072906412308e-07, | |
| "loss": 0.1418, | |
| "step": 1446 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.9376692960274544, | |
| "learning_rate": 1.0723790211931318e-07, | |
| "loss": 0.0898, | |
| "step": 1447 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.41456782126224, | |
| "learning_rate": 1.0509000429039462e-07, | |
| "loss": 0.1479, | |
| "step": 1448 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.689576167778868, | |
| "learning_rate": 1.0296360658878013e-07, | |
| "loss": 0.1314, | |
| "step": 1449 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.7379030872650063, | |
| "learning_rate": 1.0085871835434025e-07, | |
| "loss": 0.1169, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.9275633567974124, | |
| "learning_rate": 9.87753488324672e-08, | |
| "loss": 0.09, | |
| "step": 1451 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.3179852772495493, | |
| "learning_rate": 9.671350717403605e-08, | |
| "loss": 0.1221, | |
| "step": 1452 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.4098404189265352, | |
| "learning_rate": 9.46732024353636e-08, | |
| "loss": 0.087, | |
| "step": 1453 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.2314538937236406, | |
| "learning_rate": 9.265444357817066e-08, | |
| "loss": 0.117, | |
| "step": 1454 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.0505181997424025, | |
| "learning_rate": 9.065723946953986e-08, | |
| "loss": 0.1495, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.7905067116279876, | |
| "learning_rate": 8.868159888187733e-08, | |
| "loss": 0.1193, | |
| "step": 1456 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.81799273893074, | |
| "learning_rate": 8.672753049287664e-08, | |
| "loss": 0.1203, | |
| "step": 1457 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.890868232522128, | |
| "learning_rate": 8.479504288547769e-08, | |
| "loss": 0.1222, | |
| "step": 1458 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.5163383931955312, | |
| "learning_rate": 8.288414454782901e-08, | |
| "loss": 0.0799, | |
| "step": 1459 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 2.3767272905844417, | |
| "learning_rate": 8.099484387325496e-08, | |
| "loss": 0.1081, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.7287779946935522, | |
| "learning_rate": 7.912714916021247e-08, | |
| "loss": 0.1233, | |
| "step": 1461 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.6267375792799417, | |
| "learning_rate": 7.728106861225881e-08, | |
| "loss": 0.15, | |
| "step": 1462 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.9028146859955775, | |
| "learning_rate": 7.545661033801388e-08, | |
| "loss": 0.1214, | |
| "step": 1463 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.4298206498200186, | |
| "learning_rate": 7.365378235112686e-08, | |
| "loss": 0.0675, | |
| "step": 1464 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.8210087293673047, | |
| "learning_rate": 7.187259257023682e-08, | |
| "loss": 0.1221, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.9812606141566764, | |
| "learning_rate": 7.011304881894166e-08, | |
| "loss": 0.1239, | |
| "step": 1466 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 4.785272114417367, | |
| "learning_rate": 6.837515882576307e-08, | |
| "loss": 0.1048, | |
| "step": 1467 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.215464996164515, | |
| "learning_rate": 6.665893022411107e-08, | |
| "loss": 0.1775, | |
| "step": 1468 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.203008183595577, | |
| "learning_rate": 6.496437055225069e-08, | |
| "loss": 0.1211, | |
| "step": 1469 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.8000601715434712, | |
| "learning_rate": 6.329148725327194e-08, | |
| "loss": 0.1299, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.5637951655305367, | |
| "learning_rate": 6.164028767505214e-08, | |
| "loss": 0.0834, | |
| "step": 1471 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 3.2653968543895533, | |
| "learning_rate": 6.001077907022812e-08, | |
| "loss": 0.085, | |
| "step": 1472 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.5580858576013026, | |
| "learning_rate": 5.840296859616179e-08, | |
| "loss": 0.1167, | |
| "step": 1473 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.185376329825769, | |
| "learning_rate": 5.68168633149091e-08, | |
| "loss": 0.1184, | |
| "step": 1474 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 2.2645285125903305, | |
| "learning_rate": 5.525247019319002e-08, | |
| "loss": 0.129, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.7106023406707447, | |
| "learning_rate": 5.3709796102356385e-08, | |
| "loss": 0.0788, | |
| "step": 1476 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 2.3380603961586757, | |
| "learning_rate": 5.218884781836297e-08, | |
| "loss": 0.1284, | |
| "step": 1477 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.512720892675652, | |
| "learning_rate": 5.068963202173871e-08, | |
| "loss": 0.1356, | |
| "step": 1478 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.4871931979530277, | |
| "learning_rate": 4.92121552975533e-08, | |
| "loss": 0.1214, | |
| "step": 1479 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.7640991609880372, | |
| "learning_rate": 4.775642413539339e-08, | |
| "loss": 0.1143, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.7031338085396708, | |
| "learning_rate": 4.63224449293298e-08, | |
| "loss": 0.1065, | |
| "step": 1481 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.3246872570618125, | |
| "learning_rate": 4.4910223977892574e-08, | |
| "loss": 0.0979, | |
| "step": 1482 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.6097338260347043, | |
| "learning_rate": 4.351976748404208e-08, | |
| "loss": 0.1167, | |
| "step": 1483 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 2.0636912682383777, | |
| "learning_rate": 4.215108155514014e-08, | |
| "loss": 0.1263, | |
| "step": 1484 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 5.961857851727143, | |
| "learning_rate": 4.080417220292676e-08, | |
| "loss": 0.1154, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.3164583272195585, | |
| "learning_rate": 3.9479045343489565e-08, | |
| "loss": 0.0697, | |
| "step": 1486 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.567057715269293, | |
| "learning_rate": 3.817570679724269e-08, | |
| "loss": 0.0815, | |
| "step": 1487 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 2.8112893939559034, | |
| "learning_rate": 3.6894162288896864e-08, | |
| "loss": 0.1435, | |
| "step": 1488 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.8971429929466814, | |
| "learning_rate": 3.56344174474349e-08, | |
| "loss": 0.1706, | |
| "step": 1489 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.924060975672454, | |
| "learning_rate": 3.439647780609068e-08, | |
| "loss": 0.1051, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.6477043584675648, | |
| "learning_rate": 3.318034880231968e-08, | |
| "loss": 0.1199, | |
| "step": 1491 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.518455162450937, | |
| "learning_rate": 3.198603577777848e-08, | |
| "loss": 0.095, | |
| "step": 1492 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.778985124002772, | |
| "learning_rate": 3.081354397830083e-08, | |
| "loss": 0.091, | |
| "step": 1493 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.5009023960275205, | |
| "learning_rate": 2.9662878553873286e-08, | |
| "loss": 0.0883, | |
| "step": 1494 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.5496874067735975, | |
| "learning_rate": 2.853404455861519e-08, | |
| "loss": 0.094, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.7150317244996973, | |
| "learning_rate": 2.742704695075149e-08, | |
| "loss": 0.1335, | |
| "step": 1496 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.5255335098132747, | |
| "learning_rate": 2.634189059259773e-08, | |
| "loss": 0.0768, | |
| "step": 1497 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.6059908970378205, | |
| "learning_rate": 2.5278580250533426e-08, | |
| "loss": 0.0926, | |
| "step": 1498 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.1013819398925992, | |
| "learning_rate": 2.4237120594982623e-08, | |
| "loss": 0.0849, | |
| "step": 1499 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.064549400690215, | |
| "learning_rate": 2.3217516200394474e-08, | |
| "loss": 0.1496, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.503585036914316, | |
| "learning_rate": 2.221977154522159e-08, | |
| "loss": 0.1173, | |
| "step": 1501 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 2.6924026786355353, | |
| "learning_rate": 2.124389101190172e-08, | |
| "loss": 0.126, | |
| "step": 1502 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.3013906000929985, | |
| "learning_rate": 2.028987888683831e-08, | |
| "loss": 0.0807, | |
| "step": 1503 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 3.561526266894261, | |
| "learning_rate": 1.935773936037888e-08, | |
| "loss": 0.1269, | |
| "step": 1504 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.8204583155645853, | |
| "learning_rate": 1.8447476526802787e-08, | |
| "loss": 0.095, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.6129658981176747, | |
| "learning_rate": 1.755909438429515e-08, | |
| "loss": 0.1419, | |
| "step": 1506 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 3.9525112868271206, | |
| "learning_rate": 1.6692596834937402e-08, | |
| "loss": 0.1011, | |
| "step": 1507 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 2.5166569607658893, | |
| "learning_rate": 1.584798768468343e-08, | |
| "loss": 0.1127, | |
| "step": 1508 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.9131305609595923, | |
| "learning_rate": 1.502527064334791e-08, | |
| "loss": 0.1484, | |
| "step": 1509 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.9633908750208668, | |
| "learning_rate": 1.422444932458633e-08, | |
| "loss": 0.1188, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 2.7438826669306713, | |
| "learning_rate": 1.3445527245881108e-08, | |
| "loss": 0.1087, | |
| "step": 1511 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 7.407030042161492, | |
| "learning_rate": 1.2688507828526042e-08, | |
| "loss": 0.1052, | |
| "step": 1512 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.677863053038089, | |
| "learning_rate": 1.1953394397610784e-08, | |
| "loss": 0.1456, | |
| "step": 1513 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.8106192549070852, | |
| "learning_rate": 1.1240190182005838e-08, | |
| "loss": 0.0885, | |
| "step": 1514 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.9341430041133112, | |
| "learning_rate": 1.0548898314349798e-08, | |
| "loss": 0.138, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.710589815924763, | |
| "learning_rate": 9.879521831033801e-09, | |
| "loss": 0.1021, | |
| "step": 1516 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.3493401007508228, | |
| "learning_rate": 9.232063672188762e-09, | |
| "loss": 0.0788, | |
| "step": 1517 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.562870179187617, | |
| "learning_rate": 8.606526681674831e-09, | |
| "loss": 0.0565, | |
| "step": 1518 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.5218941371791326, | |
| "learning_rate": 8.002913607064732e-09, | |
| "loss": 0.0959, | |
| "step": 1519 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.2566841880869064, | |
| "learning_rate": 7.421227099634887e-09, | |
| "loss": 0.067, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.673469964690565, | |
| "learning_rate": 6.861469714352087e-09, | |
| "loss": 0.086, | |
| "step": 1521 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.4357324182592992, | |
| "learning_rate": 6.323643909863508e-09, | |
| "loss": 0.0783, | |
| "step": 1522 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.7508482265946752, | |
| "learning_rate": 5.8077520484844944e-09, | |
| "loss": 0.1297, | |
| "step": 1523 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.5920805207057862, | |
| "learning_rate": 5.3137963961896745e-09, | |
| "loss": 0.1059, | |
| "step": 1524 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.701684710269744, | |
| "learning_rate": 4.841779122601864e-09, | |
| "loss": 0.0931, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.8670628864440295, | |
| "learning_rate": 4.391702300983735e-09, | |
| "loss": 0.1095, | |
| "step": 1526 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 2.908885162509405, | |
| "learning_rate": 3.963567908227273e-09, | |
| "loss": 0.1092, | |
| "step": 1527 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.5062237753340408, | |
| "learning_rate": 3.5573778248454426e-09, | |
| "loss": 0.1203, | |
| "step": 1528 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.7524116046366505, | |
| "learning_rate": 3.173133834964981e-09, | |
| "loss": 0.1093, | |
| "step": 1529 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.9006153233468437, | |
| "learning_rate": 2.8108376263175087e-09, | |
| "loss": 0.1049, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 2.1102330560966145, | |
| "learning_rate": 2.470490790232871e-09, | |
| "loss": 0.1352, | |
| "step": 1531 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.8097652022371047, | |
| "learning_rate": 2.1520948216308124e-09, | |
| "loss": 0.1154, | |
| "step": 1532 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.6090194528912374, | |
| "learning_rate": 1.8556511190165326e-09, | |
| "loss": 0.111, | |
| "step": 1533 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.6317356695738183, | |
| "learning_rate": 1.5811609844718078e-09, | |
| "loss": 0.1025, | |
| "step": 1534 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.896746595936969, | |
| "learning_rate": 1.3286256236522133e-09, | |
| "loss": 0.1372, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 2.2497007471454076, | |
| "learning_rate": 1.0980461457799075e-09, | |
| "loss": 0.1063, | |
| "step": 1536 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.1785740989098141, | |
| "learning_rate": 8.894235636386362e-10, | |
| "loss": 0.0622, | |
| "step": 1537 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 2.056741697011482, | |
| "learning_rate": 7.027587935715119e-10, | |
| "loss": 0.1564, | |
| "step": 1538 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 4.0200483834115825, | |
| "learning_rate": 5.380526554737974e-10, | |
| "loss": 0.0995, | |
| "step": 1539 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 2.7186699717455354, | |
| "learning_rate": 3.9530587279124065e-10, | |
| "loss": 0.0835, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.8843448685818118, | |
| "learning_rate": 2.7451907251729857e-10, | |
| "loss": 0.1151, | |
| "step": 1541 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.363354272019609, | |
| "learning_rate": 1.756927851881418e-10, | |
| "loss": 0.1365, | |
| "step": 1542 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 7.555450224617764, | |
| "learning_rate": 9.882744488431961e-11, | |
| "loss": 0.1124, | |
| "step": 1543 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.729701488199868, | |
| "learning_rate": 4.392338922354356e-11, | |
| "loss": 0.1046, | |
| "step": 1544 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 2.2949144400623056, | |
| "learning_rate": 1.098085936401816e-11, | |
| "loss": 0.1159, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 2.637260805128306, | |
| "learning_rate": 0.0, | |
| "loss": 0.1204, | |
| "step": 1546 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 1546, | |
| "total_flos": 71888985692160.0, | |
| "train_loss": 0.12158192890572131, | |
| "train_runtime": 29405.3322, | |
| "train_samples_per_second": 4.207, | |
| "train_steps_per_second": 0.053 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 1546, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "total_flos": 71888985692160.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |