{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 38.61003861003861,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03861003861003861,
      "grad_norm": 13.287198066711426,
      "learning_rate": 1.8e-06,
      "loss": 1.38,
      "step": 10
    },
    {
      "epoch": 0.07722007722007722,
      "grad_norm": 6.655487537384033,
      "learning_rate": 3.8e-06,
      "loss": 0.9573,
      "step": 20
    },
    {
      "epoch": 0.11583011583011583,
      "grad_norm": 4.606926441192627,
      "learning_rate": 5.8e-06,
      "loss": 0.6618,
      "step": 30
    },
    {
      "epoch": 0.15444015444015444,
      "grad_norm": 3.794658660888672,
      "learning_rate": 7.8e-06,
      "loss": 0.4893,
      "step": 40
    },
    {
      "epoch": 0.19305019305019305,
      "grad_norm": 3.480128049850464,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.3991,
      "step": 50
    },
    {
      "epoch": 0.23166023166023167,
      "grad_norm": 2.990638494491577,
      "learning_rate": 1.18e-05,
      "loss": 0.3731,
      "step": 60
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 3.4375221729278564,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.3468,
      "step": 70
    },
    {
      "epoch": 0.3088803088803089,
      "grad_norm": 3.048978328704834,
      "learning_rate": 1.58e-05,
      "loss": 0.299,
      "step": 80
    },
    {
      "epoch": 0.3474903474903475,
      "grad_norm": 2.6503806114196777,
      "learning_rate": 1.78e-05,
      "loss": 0.29,
      "step": 90
    },
    {
      "epoch": 0.3861003861003861,
      "grad_norm": 2.6138811111450195,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.2716,
      "step": 100
    },
    {
      "epoch": 0.4247104247104247,
      "grad_norm": 2.6181092262268066,
      "learning_rate": 2.18e-05,
      "loss": 0.2841,
      "step": 110
    },
    {
      "epoch": 0.46332046332046334,
      "grad_norm": 2.2942309379577637,
      "learning_rate": 2.38e-05,
      "loss": 0.2886,
      "step": 120
    },
    {
      "epoch": 0.5019305019305019,
      "grad_norm": 2.857469081878662,
      "learning_rate": 2.58e-05,
      "loss": 0.2244,
      "step": 130
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 2.050870418548584,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.249,
      "step": 140
    },
    {
      "epoch": 0.5791505791505791,
      "grad_norm": 2.1127007007598877,
      "learning_rate": 2.98e-05,
      "loss": 0.242,
      "step": 150
    },
    {
      "epoch": 0.6177606177606177,
      "grad_norm": 2.074040651321411,
      "learning_rate": 3.18e-05,
      "loss": 0.1953,
      "step": 160
    },
    {
      "epoch": 0.6563706563706564,
      "grad_norm": 1.7310792207717896,
      "learning_rate": 3.38e-05,
      "loss": 0.2109,
      "step": 170
    },
    {
      "epoch": 0.694980694980695,
      "grad_norm": 1.7166472673416138,
      "learning_rate": 3.58e-05,
      "loss": 0.1997,
      "step": 180
    },
    {
      "epoch": 0.7335907335907336,
      "grad_norm": 2.083514451980591,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.1976,
      "step": 190
    },
    {
      "epoch": 0.7722007722007722,
      "grad_norm": 2.140868663787842,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.1892,
      "step": 200
    },
    {
      "epoch": 0.8108108108108109,
      "grad_norm": 1.3370658159255981,
      "learning_rate": 4.18e-05,
      "loss": 0.1806,
      "step": 210
    },
    {
      "epoch": 0.8494208494208494,
      "grad_norm": 1.2409409284591675,
      "learning_rate": 4.38e-05,
      "loss": 0.2032,
      "step": 220
    },
    {
      "epoch": 0.888030888030888,
      "grad_norm": 1.60843026638031,
      "learning_rate": 4.58e-05,
      "loss": 0.2125,
      "step": 230
    },
    {
      "epoch": 0.9266409266409267,
      "grad_norm": 1.6353750228881836,
      "learning_rate": 4.78e-05,
      "loss": 0.1991,
      "step": 240
    },
    {
      "epoch": 0.9652509652509652,
      "grad_norm": 1.9422987699508667,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.1939,
      "step": 250
    },
    {
      "epoch": 1.0038610038610039,
      "grad_norm": 2.1851253509521484,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.1796,
      "step": 260
    },
    {
      "epoch": 1.0424710424710424,
      "grad_norm": 1.8957761526107788,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.1836,
      "step": 270
    },
    {
      "epoch": 1.0810810810810811,
      "grad_norm": 1.7820072174072266,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.1887,
      "step": 280
    },
    {
      "epoch": 1.1196911196911197,
      "grad_norm": 1.6855354309082031,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.2198,
      "step": 290
    },
    {
      "epoch": 1.1583011583011582,
      "grad_norm": 1.629774570465088,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.2079,
      "step": 300
    },
    {
      "epoch": 1.196911196911197,
      "grad_norm": 1.340558648109436,
      "learning_rate": 6.18e-05,
      "loss": 0.2119,
      "step": 310
    },
    {
      "epoch": 1.2355212355212355,
      "grad_norm": 0.9606438875198364,
      "learning_rate": 6.38e-05,
      "loss": 0.1559,
      "step": 320
    },
    {
      "epoch": 1.2741312741312742,
      "grad_norm": 1.6456347703933716,
      "learning_rate": 6.58e-05,
      "loss": 0.2021,
      "step": 330
    },
    {
      "epoch": 1.3127413127413128,
      "grad_norm": 1.6377339363098145,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.1979,
      "step": 340
    },
    {
      "epoch": 1.3513513513513513,
      "grad_norm": 1.6483289003372192,
      "learning_rate": 6.98e-05,
      "loss": 0.1935,
      "step": 350
    },
    {
      "epoch": 1.3899613899613898,
      "grad_norm": 1.1614036560058594,
      "learning_rate": 7.18e-05,
      "loss": 0.1608,
      "step": 360
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 1.685562014579773,
      "learning_rate": 7.38e-05,
      "loss": 0.1895,
      "step": 370
    },
    {
      "epoch": 1.4671814671814671,
      "grad_norm": 1.646178960800171,
      "learning_rate": 7.58e-05,
      "loss": 0.195,
      "step": 380
    },
    {
      "epoch": 1.505791505791506,
      "grad_norm": 1.4592519998550415,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.2059,
      "step": 390
    },
    {
      "epoch": 1.5444015444015444,
      "grad_norm": 0.9366406798362732,
      "learning_rate": 7.98e-05,
      "loss": 0.2017,
      "step": 400
    },
    {
      "epoch": 1.583011583011583,
      "grad_norm": 0.8088350296020508,
      "learning_rate": 8.18e-05,
      "loss": 0.1956,
      "step": 410
    },
    {
      "epoch": 1.6216216216216215,
      "grad_norm": 1.1527583599090576,
      "learning_rate": 8.38e-05,
      "loss": 0.1754,
      "step": 420
    },
    {
      "epoch": 1.6602316602316602,
      "grad_norm": 1.142561435699463,
      "learning_rate": 8.58e-05,
      "loss": 0.2244,
      "step": 430
    },
    {
      "epoch": 1.698841698841699,
      "grad_norm": 1.2559890747070312,
      "learning_rate": 8.78e-05,
      "loss": 0.1964,
      "step": 440
    },
    {
      "epoch": 1.7374517374517375,
      "grad_norm": 1.1182104349136353,
      "learning_rate": 8.98e-05,
      "loss": 0.1857,
      "step": 450
    },
    {
      "epoch": 1.776061776061776,
      "grad_norm": 1.0533323287963867,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.1765,
      "step": 460
    },
    {
      "epoch": 1.8146718146718146,
      "grad_norm": 0.9969793558120728,
      "learning_rate": 9.38e-05,
      "loss": 0.1737,
      "step": 470
    },
    {
      "epoch": 1.8532818532818531,
      "grad_norm": 1.2537906169891357,
      "learning_rate": 9.58e-05,
      "loss": 0.1935,
      "step": 480
    },
    {
      "epoch": 1.8918918918918919,
      "grad_norm": 0.8507728576660156,
      "learning_rate": 9.78e-05,
      "loss": 0.1926,
      "step": 490
    },
    {
      "epoch": 1.9305019305019306,
      "grad_norm": 1.64980947971344,
      "learning_rate": 9.98e-05,
      "loss": 0.1639,
      "step": 500
    },
    {
      "epoch": 1.9691119691119692,
      "grad_norm": 1.5939923524856567,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.1925,
      "step": 510
    },
    {
      "epoch": 2.0077220077220077,
      "grad_norm": 1.505319595336914,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.1872,
      "step": 520
    },
    {
      "epoch": 2.0463320463320462,
      "grad_norm": 1.1174558401107788,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.2034,
      "step": 530
    },
    {
      "epoch": 2.0849420849420848,
      "grad_norm": 0.7865064144134521,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.1539,
      "step": 540
    },
    {
      "epoch": 2.1235521235521237,
      "grad_norm": 1.0896923542022705,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.2019,
      "step": 550
    },
    {
      "epoch": 2.1621621621621623,
      "grad_norm": 0.8688647747039795,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.2035,
      "step": 560
    },
    {
      "epoch": 2.200772200772201,
      "grad_norm": 1.3430683612823486,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.1787,
      "step": 570
    },
    {
      "epoch": 2.2393822393822393,
      "grad_norm": 1.0575079917907715,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.1488,
      "step": 580
    },
    {
      "epoch": 2.277992277992278,
      "grad_norm": 1.3032593727111816,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.1664,
      "step": 590
    },
    {
      "epoch": 2.3166023166023164,
      "grad_norm": 1.1514618396759033,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.1885,
      "step": 600
    },
    {
      "epoch": 2.3552123552123554,
      "grad_norm": 0.8944717049598694,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.1736,
      "step": 610
    },
    {
      "epoch": 2.393822393822394,
      "grad_norm": 1.0690749883651733,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.1568,
      "step": 620
    },
    {
      "epoch": 2.4324324324324325,
      "grad_norm": 0.8698034286499023,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.1628,
      "step": 630
    },
    {
      "epoch": 2.471042471042471,
      "grad_norm": 0.7732095718383789,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.1761,
      "step": 640
    },
    {
      "epoch": 2.5096525096525095,
      "grad_norm": 0.8875847458839417,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.1659,
      "step": 650
    },
    {
      "epoch": 2.5482625482625485,
      "grad_norm": 0.719978392124176,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.1846,
      "step": 660
    },
    {
      "epoch": 2.586872586872587,
      "grad_norm": 1.2403405904769897,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.1613,
      "step": 670
    },
    {
      "epoch": 2.6254826254826256,
      "grad_norm": 1.445119857788086,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.1797,
      "step": 680
    },
    {
      "epoch": 2.664092664092664,
      "grad_norm": 0.6605463624000549,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.1607,
      "step": 690
    },
    {
      "epoch": 2.7027027027027026,
      "grad_norm": 0.8701773285865784,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.17,
      "step": 700
    },
    {
      "epoch": 2.741312741312741,
      "grad_norm": 0.7130756974220276,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.1433,
      "step": 710
    },
    {
      "epoch": 2.7799227799227797,
      "grad_norm": 0.9633455872535706,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.1565,
      "step": 720
    },
    {
      "epoch": 2.8185328185328187,
      "grad_norm": 1.4280503988265991,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.1426,
      "step": 730
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 1.0674346685409546,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.1429,
      "step": 740
    },
    {
      "epoch": 2.8957528957528957,
      "grad_norm": 0.8652824759483337,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.1796,
      "step": 750
    },
    {
      "epoch": 2.9343629343629343,
      "grad_norm": 1.0494927167892456,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.1449,
      "step": 760
    },
    {
      "epoch": 2.972972972972973,
      "grad_norm": 1.1388959884643555,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.1535,
      "step": 770
    },
    {
      "epoch": 3.011583011583012,
      "grad_norm": 1.15719473361969,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.1746,
      "step": 780
    },
    {
      "epoch": 3.0501930501930503,
      "grad_norm": 0.8897600769996643,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.1499,
      "step": 790
    },
    {
      "epoch": 3.088803088803089,
      "grad_norm": 0.865238606929779,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.1429,
      "step": 800
    },
    {
      "epoch": 3.1274131274131274,
      "grad_norm": 0.8140860795974731,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.1563,
      "step": 810
    },
    {
      "epoch": 3.166023166023166,
      "grad_norm": 0.7682332396507263,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.1493,
      "step": 820
    },
    {
      "epoch": 3.2046332046332044,
      "grad_norm": 0.9081019759178162,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.1887,
      "step": 830
    },
    {
      "epoch": 3.2432432432432434,
      "grad_norm": 1.0015044212341309,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.1574,
      "step": 840
    },
    {
      "epoch": 3.281853281853282,
      "grad_norm": 1.160292625427246,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.1714,
      "step": 850
    },
    {
      "epoch": 3.3204633204633205,
      "grad_norm": 0.9939375519752502,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.1456,
      "step": 860
    },
    {
      "epoch": 3.359073359073359,
      "grad_norm": 0.7976135015487671,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.1891,
      "step": 870
    },
    {
      "epoch": 3.3976833976833976,
      "grad_norm": 1.28683602809906,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.1496,
      "step": 880
    },
    {
      "epoch": 3.436293436293436,
      "grad_norm": 0.7454524636268616,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.1568,
      "step": 890
    },
    {
      "epoch": 3.474903474903475,
      "grad_norm": 1.0883091688156128,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.1612,
      "step": 900
    },
    {
      "epoch": 3.5135135135135136,
      "grad_norm": 1.1127383708953857,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.1541,
      "step": 910
    },
    {
      "epoch": 3.552123552123552,
      "grad_norm": 0.8523925542831421,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.157,
      "step": 920
    },
    {
      "epoch": 3.5907335907335907,
      "grad_norm": 0.7145108580589294,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.1639,
      "step": 930
    },
    {
      "epoch": 3.629343629343629,
      "grad_norm": 0.9005628824234009,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.16,
      "step": 940
    },
    {
      "epoch": 3.667953667953668,
      "grad_norm": 1.1173086166381836,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.1246,
      "step": 950
    },
    {
      "epoch": 3.7065637065637067,
      "grad_norm": 1.0558993816375732,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.1411,
      "step": 960
    },
    {
      "epoch": 3.7451737451737452,
      "grad_norm": 1.1991870403289795,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.1791,
      "step": 970
    },
    {
      "epoch": 3.7837837837837838,
      "grad_norm": 0.6233035922050476,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.1197,
      "step": 980
    },
    {
      "epoch": 3.8223938223938223,
      "grad_norm": 0.9723224639892578,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.1134,
      "step": 990
    },
    {
      "epoch": 3.861003861003861,
      "grad_norm": 0.6947616934776306,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.1405,
      "step": 1000
    },
    {
      "epoch": 3.8996138996138994,
      "grad_norm": 0.5903553366661072,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.1422,
      "step": 1010
    },
    {
      "epoch": 3.9382239382239383,
      "grad_norm": 0.5389763116836548,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.1426,
      "step": 1020
    },
    {
      "epoch": 3.976833976833977,
      "grad_norm": 0.8586219549179077,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.1351,
      "step": 1030
    },
    {
      "epoch": 4.015444015444015,
      "grad_norm": 0.6858639121055603,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.1168,
      "step": 1040
    },
    {
      "epoch": 4.054054054054054,
      "grad_norm": 0.9002066254615784,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.1275,
      "step": 1050
    },
    {
      "epoch": 4.0926640926640925,
      "grad_norm": 0.6978407502174377,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.1505,
      "step": 1060
    },
    {
      "epoch": 4.1312741312741315,
      "grad_norm": 1.1149928569793701,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.163,
      "step": 1070
    },
    {
      "epoch": 4.1698841698841695,
      "grad_norm": 0.642062246799469,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.1669,
      "step": 1080
    },
    {
      "epoch": 4.2084942084942085,
      "grad_norm": 0.5878561735153198,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.1483,
      "step": 1090
    },
    {
      "epoch": 4.2471042471042475,
      "grad_norm": 0.707226037979126,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.1175,
      "step": 1100
    },
    {
      "epoch": 4.285714285714286,
      "grad_norm": 0.6299799084663391,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.1541,
      "step": 1110
    },
    {
      "epoch": 4.324324324324325,
      "grad_norm": 0.594017505645752,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.1224,
      "step": 1120
    },
    {
      "epoch": 4.362934362934363,
      "grad_norm": 0.9055441617965698,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.1474,
      "step": 1130
    },
    {
      "epoch": 4.401544401544402,
      "grad_norm": 1.1802901029586792,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.1697,
      "step": 1140
    },
    {
      "epoch": 4.440154440154441,
      "grad_norm": 0.6890931725502014,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.148,
      "step": 1150
    },
    {
      "epoch": 4.478764478764479,
      "grad_norm": 0.5606589317321777,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.1344,
      "step": 1160
    },
    {
      "epoch": 4.517374517374518,
      "grad_norm": 0.6417577266693115,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.1375,
      "step": 1170
    },
    {
      "epoch": 4.555984555984556,
      "grad_norm": 0.8422744870185852,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.1222,
      "step": 1180
    },
    {
      "epoch": 4.594594594594595,
      "grad_norm": 0.7227635979652405,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.1407,
      "step": 1190
    },
    {
      "epoch": 4.633204633204633,
      "grad_norm": 0.9805036187171936,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.1473,
      "step": 1200
    },
    {
      "epoch": 4.671814671814672,
      "grad_norm": 0.6718726754188538,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.1456,
      "step": 1210
    },
    {
      "epoch": 4.710424710424711,
      "grad_norm": 0.9643238186836243,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.1352,
      "step": 1220
    },
    {
      "epoch": 4.749034749034749,
      "grad_norm": 0.9292412400245667,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.1409,
      "step": 1230
    },
    {
      "epoch": 4.787644787644788,
      "grad_norm": 0.7480415105819702,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.1378,
      "step": 1240
    },
    {
      "epoch": 4.826254826254826,
      "grad_norm": 0.938568115234375,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.1175,
      "step": 1250
    },
    {
      "epoch": 4.864864864864865,
      "grad_norm": 0.6812225580215454,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.1351,
      "step": 1260
    },
    {
      "epoch": 4.903474903474903,
      "grad_norm": 0.5791270732879639,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.1182,
      "step": 1270
    },
    {
      "epoch": 4.942084942084942,
      "grad_norm": 0.7875615954399109,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.137,
      "step": 1280
    },
    {
      "epoch": 4.980694980694981,
      "grad_norm": 0.49673154950141907,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.1387,
      "step": 1290
    },
    {
      "epoch": 5.019305019305019,
      "grad_norm": 0.701742947101593,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.1337,
      "step": 1300
    },
    {
      "epoch": 5.057915057915058,
      "grad_norm": 0.5022175312042236,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.1316,
      "step": 1310
    },
    {
      "epoch": 5.096525096525096,
      "grad_norm": 0.7052822113037109,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.1272,
      "step": 1320
    },
    {
      "epoch": 5.135135135135135,
      "grad_norm": 0.9381476640701294,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.1201,
      "step": 1330
    },
    {
      "epoch": 5.173745173745174,
      "grad_norm": 0.7570599317550659,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.1149,
      "step": 1340
    },
    {
      "epoch": 5.212355212355212,
      "grad_norm": 0.7424160242080688,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.1311,
      "step": 1350
    },
    {
      "epoch": 5.250965250965251,
      "grad_norm": 0.7185351252555847,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.1431,
      "step": 1360
    },
    {
      "epoch": 5.289575289575289,
      "grad_norm": 0.8532226085662842,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.1392,
      "step": 1370
    },
    {
      "epoch": 5.328185328185328,
      "grad_norm": 0.6534046530723572,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.123,
      "step": 1380
    },
    {
      "epoch": 5.366795366795367,
      "grad_norm": 0.5626727938652039,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.1367,
      "step": 1390
    },
    {
      "epoch": 5.405405405405405,
      "grad_norm": 0.8905888795852661,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.122,
      "step": 1400
    },
    {
      "epoch": 5.444015444015444,
      "grad_norm": 0.818387508392334,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.1366,
      "step": 1410
    },
    {
      "epoch": 5.482625482625482,
      "grad_norm": 0.9816891551017761,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.1332,
      "step": 1420
    },
    {
      "epoch": 5.521235521235521,
      "grad_norm": 0.46126094460487366,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.1001,
      "step": 1430
    },
    {
      "epoch": 5.559845559845559,
      "grad_norm": 0.6799705028533936,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.1407,
      "step": 1440
    },
    {
      "epoch": 5.598455598455598,
      "grad_norm": 0.7840109467506409,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.1494,
      "step": 1450
    },
    {
      "epoch": 5.637065637065637,
      "grad_norm": 0.7920464277267456,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.1332,
      "step": 1460
    },
    {
      "epoch": 5.675675675675675,
      "grad_norm": 0.5265321135520935,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.1317,
      "step": 1470
    },
    {
      "epoch": 5.714285714285714,
      "grad_norm": 0.679107666015625,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.1062,
      "step": 1480
    },
    {
      "epoch": 5.752895752895753,
      "grad_norm": 0.6273481845855713,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.1289,
      "step": 1490
    },
    {
      "epoch": 5.7915057915057915,
      "grad_norm": 0.691649317741394,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.1186,
      "step": 1500
    },
    {
      "epoch": 5.8301158301158305,
      "grad_norm": 0.8244628310203552,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.1225,
      "step": 1510
    },
    {
      "epoch": 5.8687258687258685,
      "grad_norm": 1.0755887031555176,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.1317,
      "step": 1520
    },
    {
      "epoch": 5.9073359073359075,
      "grad_norm": 0.8774343132972717,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.1312,
      "step": 1530
    },
    {
      "epoch": 5.945945945945946,
      "grad_norm": 0.606974184513092,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.1145,
      "step": 1540
    },
    {
      "epoch": 5.984555984555985,
      "grad_norm": 0.5077939033508301,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.1241,
      "step": 1550
    },
    {
      "epoch": 6.023166023166024,
      "grad_norm": 0.5576596856117249,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.1247,
      "step": 1560
    },
    {
      "epoch": 6.061776061776062,
      "grad_norm": 1.0629464387893677,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.1075,
      "step": 1570
    },
    {
      "epoch": 6.100386100386101,
      "grad_norm": 0.5680471062660217,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.1225,
      "step": 1580
    },
    {
      "epoch": 6.138996138996139,
      "grad_norm": 0.6717116832733154,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.129,
      "step": 1590
    },
    {
      "epoch": 6.177606177606178,
      "grad_norm": 0.7363089919090271,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.1436,
      "step": 1600
    },
    {
      "epoch": 6.216216216216216,
      "grad_norm": 0.670937716960907,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.1029,
      "step": 1610
    },
    {
      "epoch": 6.254826254826255,
      "grad_norm": 0.6786972284317017,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.1215,
      "step": 1620
    },
    {
      "epoch": 6.293436293436294,
      "grad_norm": 0.5797168612480164,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.1499,
      "step": 1630
    },
    {
      "epoch": 6.332046332046332,
      "grad_norm": 1.157994031906128,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.1376,
      "step": 1640
    },
    {
      "epoch": 6.370656370656371,
      "grad_norm": 0.7316443920135498,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.1226,
      "step": 1650
    },
    {
      "epoch": 6.409266409266409,
      "grad_norm": 0.5234395265579224,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.1194,
      "step": 1660
    },
    {
      "epoch": 6.447876447876448,
      "grad_norm": 0.852341890335083,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.1053,
      "step": 1670
    },
    {
      "epoch": 6.486486486486487,
      "grad_norm": 0.8800656795501709,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.114,
      "step": 1680
    },
    {
      "epoch": 6.525096525096525,
      "grad_norm": 0.8578097820281982,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.1141,
      "step": 1690
    },
    {
      "epoch": 6.563706563706564,
      "grad_norm": 0.8600365519523621,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.1128,
      "step": 1700
    },
    {
      "epoch": 6.602316602316602,
      "grad_norm": 0.6659354567527771,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.1185,
      "step": 1710
    },
    {
      "epoch": 6.640926640926641,
      "grad_norm": 0.7638758420944214,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.1429,
      "step": 1720
    },
    {
      "epoch": 6.67953667953668,
      "grad_norm": 0.6346604824066162,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.1079,
      "step": 1730
    },
    {
      "epoch": 6.718146718146718,
      "grad_norm": 0.7118030190467834,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.1174,
      "step": 1740
    },
    {
      "epoch": 6.756756756756757,
      "grad_norm": 0.6951404213905334,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.1141,
      "step": 1750
    },
    {
      "epoch": 6.795366795366795,
      "grad_norm": 0.8285521864891052,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.1168,
      "step": 1760
    },
    {
      "epoch": 6.833976833976834,
      "grad_norm": 0.7002620697021484,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.109,
      "step": 1770
    },
    {
      "epoch": 6.872586872586872,
      "grad_norm": 0.5306683778762817,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.1324,
      "step": 1780
    },
    {
      "epoch": 6.911196911196911,
      "grad_norm": 0.7958775162696838,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.1098,
      "step": 1790
    },
    {
      "epoch": 6.94980694980695,
      "grad_norm": 0.5308017730712891,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.1135,
      "step": 1800
    },
    {
      "epoch": 6.988416988416988,
      "grad_norm": 0.5937675833702087,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.1073,
      "step": 1810
    },
    {
      "epoch": 7.027027027027027,
      "grad_norm": 0.72931307554245,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.1119,
      "step": 1820
    },
    {
      "epoch": 7.065637065637065,
      "grad_norm": 0.6811597347259521,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.1127,
      "step": 1830
    },
    {
      "epoch": 7.104247104247104,
      "grad_norm": 0.6665561199188232,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.1136,
      "step": 1840
    },
    {
      "epoch": 7.142857142857143,
      "grad_norm": 0.6536176800727844,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.1464,
      "step": 1850
    },
    {
      "epoch": 7.181467181467181,
      "grad_norm": 0.8365440964698792,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.1455,
      "step": 1860
    },
    {
      "epoch": 7.22007722007722,
      "grad_norm": 0.6462043523788452,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.0999,
      "step": 1870
    },
    {
      "epoch": 7.258687258687258,
      "grad_norm": 0.558756411075592,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.1062,
      "step": 1880
    },
    {
      "epoch": 7.297297297297297,
      "grad_norm": 0.8646144866943359,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.114,
      "step": 1890
    },
    {
      "epoch": 7.335907335907336,
      "grad_norm": 0.6492713689804077,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.1157,
      "step": 1900
    },
    {
      "epoch": 7.374517374517374,
      "grad_norm": 0.8752506375312805,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.1351,
      "step": 1910
    },
    {
      "epoch": 7.413127413127413,
      "grad_norm": 0.6606900095939636,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.1032,
      "step": 1920
    },
    {
      "epoch": 7.4517374517374515,
      "grad_norm": 0.6861739754676819,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.113,
      "step": 1930
    },
    {
      "epoch": 7.4903474903474905,
      "grad_norm": 0.6919797658920288,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.1155,
      "step": 1940
    },
    {
      "epoch": 7.528957528957529,
      "grad_norm": 0.6717447638511658,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.1057,
      "step": 1950
    },
    {
      "epoch": 7.5675675675675675,
      "grad_norm": 0.6557009220123291,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.1316,
      "step": 1960
    },
    {
      "epoch": 7.6061776061776065,
      "grad_norm": 0.46149829030036926,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0889,
      "step": 1970
    },
    {
      "epoch": 7.644787644787645,
      "grad_norm": 0.6347934603691101,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.1269,
      "step": 1980
    },
    {
      "epoch": 7.683397683397684,
      "grad_norm": 0.688076913356781,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0905,
      "step": 1990
    },
    {
      "epoch": 7.722007722007722,
      "grad_norm": 0.8914374113082886,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.1108,
      "step": 2000
    },
    {
      "epoch": 7.760617760617761,
      "grad_norm": 0.7588872313499451,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.1348,
      "step": 2010
    },
    {
      "epoch": 7.799227799227799,
      "grad_norm": 0.7361499071121216,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.1198,
      "step": 2020
    },
    {
      "epoch": 7.837837837837838,
      "grad_norm": 0.6843787431716919,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0972,
      "step": 2030
    },
    {
      "epoch": 7.876447876447877,
      "grad_norm": 0.6163288950920105,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.1171,
      "step": 2040
    },
    {
      "epoch": 7.915057915057915,
      "grad_norm": 0.8062847256660461,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.1143,
      "step": 2050
    },
    {
      "epoch": 7.953667953667954,
      "grad_norm": 0.7317461371421814,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.1101,
      "step": 2060
    },
    {
      "epoch": 7.992277992277993,
      "grad_norm": 0.6277090311050415,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0967,
      "step": 2070
    },
    {
      "epoch": 8.03088803088803,
      "grad_norm": 0.5492644309997559,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0988,
      "step": 2080
    },
    {
      "epoch": 8.069498069498069,
      "grad_norm": 0.9302526116371155,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.1069,
      "step": 2090
    },
    {
      "epoch": 8.108108108108109,
      "grad_norm": 0.6697160005569458,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.1049,
      "step": 2100
    },
    {
      "epoch": 8.146718146718147,
      "grad_norm": 0.844696581363678,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.1387,
      "step": 2110
    },
    {
      "epoch": 8.185328185328185,
      "grad_norm": 0.5563222169876099,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.1177,
      "step": 2120
    },
    {
      "epoch": 8.223938223938223,
      "grad_norm": 0.585287868976593,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0899,
      "step": 2130
    },
    {
      "epoch": 8.262548262548263,
      "grad_norm": 0.6320898532867432,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0855,
      "step": 2140
    },
    {
      "epoch": 8.301158301158301,
      "grad_norm": 0.8141903281211853,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.1256,
      "step": 2150
    },
    {
      "epoch": 8.339768339768339,
      "grad_norm": 0.5062960982322693,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.1093,
      "step": 2160
    },
    {
      "epoch": 8.378378378378379,
      "grad_norm": 0.42139625549316406,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.102,
      "step": 2170
    },
    {
      "epoch": 8.416988416988417,
      "grad_norm": 0.515316367149353,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.112,
      "step": 2180
    },
    {
      "epoch": 8.455598455598455,
      "grad_norm": 0.7278834581375122,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.1132,
      "step": 2190
    },
    {
      "epoch": 8.494208494208495,
      "grad_norm": 0.6631847023963928,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0936,
      "step": 2200
    },
    {
      "epoch": 8.532818532818533,
      "grad_norm": 0.5388717651367188,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.1109,
      "step": 2210
    },
    {
      "epoch": 8.571428571428571,
      "grad_norm": 0.826079785823822,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.1007,
      "step": 2220
    },
    {
      "epoch": 8.61003861003861,
      "grad_norm": 0.7166194915771484,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.1059,
      "step": 2230
    },
    {
      "epoch": 8.64864864864865,
      "grad_norm": 0.5368038415908813,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.1064,
      "step": 2240
    },
    {
      "epoch": 8.687258687258687,
      "grad_norm": 0.7046194076538086,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.1185,
      "step": 2250
    },
    {
      "epoch": 8.725868725868725,
      "grad_norm": 0.7599208950996399,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.0974,
      "step": 2260
    },
    {
      "epoch": 8.764478764478765,
      "grad_norm": 0.7834563255310059,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.0964,
      "step": 2270
    },
    {
      "epoch": 8.803088803088803,
      "grad_norm": 0.6925577521324158,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.1009,
      "step": 2280
    },
    {
      "epoch": 8.841698841698841,
      "grad_norm": 0.5668750405311584,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.129,
      "step": 2290
    },
    {
      "epoch": 8.880308880308881,
      "grad_norm": 0.469650536775589,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0954,
      "step": 2300
    },
    {
      "epoch": 8.91891891891892,
      "grad_norm": 0.5168886780738831,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.11,
      "step": 2310
    },
    {
      "epoch": 8.957528957528957,
      "grad_norm": 0.5102772116661072,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0921,
      "step": 2320
    },
    {
      "epoch": 8.996138996138995,
      "grad_norm": 0.6147180795669556,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.1241,
      "step": 2330
    },
    {
      "epoch": 9.034749034749035,
      "grad_norm": 0.39069607853889465,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0937,
      "step": 2340
    },
    {
      "epoch": 9.073359073359073,
      "grad_norm": 0.4520420730113983,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.0984,
      "step": 2350
    },
    {
      "epoch": 9.111969111969112,
      "grad_norm": 0.5111847519874573,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.112,
      "step": 2360
    },
    {
      "epoch": 9.150579150579151,
      "grad_norm": 0.7047441601753235,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.1154,
      "step": 2370
    },
    {
      "epoch": 9.18918918918919,
      "grad_norm": 0.4314562976360321,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.1205,
      "step": 2380
    },
    {
      "epoch": 9.227799227799228,
      "grad_norm": 0.5829694271087646,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.1033,
      "step": 2390
    },
    {
      "epoch": 9.266409266409266,
      "grad_norm": 0.5105317234992981,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.1015,
      "step": 2400
    },
    {
      "epoch": 9.305019305019306,
      "grad_norm": 0.7209296822547913,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.1052,
      "step": 2410
    },
    {
      "epoch": 9.343629343629344,
      "grad_norm": 0.5549200773239136,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.1383,
      "step": 2420
    },
    {
      "epoch": 9.382239382239382,
      "grad_norm": 0.7225512266159058,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.1168,
      "step": 2430
    },
    {
      "epoch": 9.420849420849422,
      "grad_norm": 0.5784018635749817,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.1168,
      "step": 2440
    },
    {
      "epoch": 9.45945945945946,
      "grad_norm": 0.7874284982681274,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.1087,
      "step": 2450
    },
    {
      "epoch": 9.498069498069498,
      "grad_norm": 0.4949483871459961,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.0976,
      "step": 2460
    },
    {
      "epoch": 9.536679536679536,
      "grad_norm": 0.8492587208747864,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.095,
      "step": 2470
    },
    {
      "epoch": 9.575289575289576,
      "grad_norm": 0.5878174901008606,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.1085,
      "step": 2480
    },
    {
      "epoch": 9.613899613899614,
      "grad_norm": 0.6225532293319702,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.0889,
      "step": 2490
    },
    {
      "epoch": 9.652509652509652,
      "grad_norm": 0.5066150426864624,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.087,
      "step": 2500
    },
    {
      "epoch": 9.691119691119692,
      "grad_norm": 0.48042768239974976,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0767,
      "step": 2510
    },
    {
      "epoch": 9.72972972972973,
      "grad_norm": 0.6562822461128235,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.0901,
      "step": 2520
    },
    {
      "epoch": 9.768339768339768,
      "grad_norm": 0.723908543586731,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0899,
      "step": 2530
    },
    {
      "epoch": 9.806949806949808,
      "grad_norm": 0.8529309630393982,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.1066,
      "step": 2540
    },
    {
      "epoch": 9.845559845559846,
      "grad_norm": 0.483698308467865,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.1059,
      "step": 2550
    },
    {
      "epoch": 9.884169884169884,
      "grad_norm": 0.4942833185195923,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.1062,
      "step": 2560
    },
    {
      "epoch": 9.922779922779922,
      "grad_norm": 0.28115925192832947,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0947,
      "step": 2570
    },
    {
      "epoch": 9.961389961389962,
      "grad_norm": 0.41657555103302,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.1056,
      "step": 2580
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.9054367542266846,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0991,
      "step": 2590
    },
    {
      "epoch": 10.038610038610038,
      "grad_norm": 0.4605831503868103,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.1027,
      "step": 2600
    },
    {
      "epoch": 10.077220077220078,
      "grad_norm": 0.5121803283691406,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0974,
      "step": 2610
    },
    {
      "epoch": 10.115830115830116,
      "grad_norm": 0.5194014310836792,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0852,
      "step": 2620
    },
    {
      "epoch": 10.154440154440154,
      "grad_norm": 0.49289292097091675,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.1083,
      "step": 2630
    },
    {
      "epoch": 10.193050193050192,
      "grad_norm": 0.47594016790390015,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.1142,
      "step": 2640
    },
    {
      "epoch": 10.231660231660232,
      "grad_norm": 0.5656114816665649,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.1038,
      "step": 2650
    },
    {
      "epoch": 10.27027027027027,
      "grad_norm": 0.575564980506897,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.1076,
      "step": 2660
    },
    {
      "epoch": 10.308880308880308,
      "grad_norm": 0.46538421511650085,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.1136,
      "step": 2670
    },
    {
      "epoch": 10.347490347490348,
      "grad_norm": 0.5079081654548645,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.1235,
      "step": 2680
    },
    {
      "epoch": 10.386100386100386,
      "grad_norm": 0.5175929069519043,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0942,
      "step": 2690
    },
    {
      "epoch": 10.424710424710424,
      "grad_norm": 0.5166839957237244,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.0936,
      "step": 2700
    },
    {
      "epoch": 10.463320463320464,
      "grad_norm": 0.6094571948051453,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.1108,
      "step": 2710
    },
    {
      "epoch": 10.501930501930502,
      "grad_norm": 0.40622076392173767,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.1263,
      "step": 2720
    },
    {
      "epoch": 10.54054054054054,
      "grad_norm": 0.5963900685310364,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.102,
      "step": 2730
    },
    {
      "epoch": 10.579150579150578,
      "grad_norm": 0.8011244535446167,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0936,
      "step": 2740
    },
    {
      "epoch": 10.617760617760618,
      "grad_norm": 0.4866567552089691,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0888,
      "step": 2750
    },
    {
      "epoch": 10.656370656370656,
      "grad_norm": 0.5055763125419617,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0917,
      "step": 2760
    },
    {
      "epoch": 10.694980694980694,
      "grad_norm": 0.539553701877594,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0902,
      "step": 2770
    },
    {
      "epoch": 10.733590733590734,
      "grad_norm": 0.5983613729476929,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.1082,
      "step": 2780
    },
    {
      "epoch": 10.772200772200772,
      "grad_norm": 0.6280248761177063,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.1028,
      "step": 2790
    },
    {
      "epoch": 10.81081081081081,
      "grad_norm": 0.6110442280769348,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.0965,
      "step": 2800
    },
    {
      "epoch": 10.849420849420849,
      "grad_norm": 0.7813714742660522,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.1389,
      "step": 2810
    },
    {
      "epoch": 10.888030888030888,
      "grad_norm": 0.6177837252616882,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.1098,
      "step": 2820
    },
    {
      "epoch": 10.926640926640927,
      "grad_norm": 0.5030504465103149,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.0877,
      "step": 2830
    },
    {
      "epoch": 10.965250965250965,
      "grad_norm": 0.7109388709068298,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.1064,
      "step": 2840
    },
    {
      "epoch": 11.003861003861005,
      "grad_norm": 0.5707525014877319,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.115,
      "step": 2850
    },
    {
      "epoch": 11.042471042471043,
      "grad_norm": 0.5837938785552979,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0986,
      "step": 2860
    },
    {
      "epoch": 11.08108108108108,
      "grad_norm": 0.556896984577179,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0921,
      "step": 2870
    },
    {
      "epoch": 11.11969111969112,
      "grad_norm": 0.47775787115097046,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0973,
      "step": 2880
    },
    {
      "epoch": 11.158301158301159,
      "grad_norm": 0.47152382135391235,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.1008,
      "step": 2890
    },
    {
      "epoch": 11.196911196911197,
      "grad_norm": 0.40532755851745605,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.1108,
      "step": 2900
    },
    {
      "epoch": 11.235521235521235,
      "grad_norm": 0.5269514918327332,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0884,
      "step": 2910
    },
    {
      "epoch": 11.274131274131275,
      "grad_norm": 0.48876118659973145,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.109,
      "step": 2920
    },
    {
      "epoch": 11.312741312741313,
      "grad_norm": 0.507595956325531,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0859,
      "step": 2930
    },
    {
      "epoch": 11.35135135135135,
      "grad_norm": 0.5442178845405579,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0884,
      "step": 2940
    },
    {
      "epoch": 11.38996138996139,
      "grad_norm": 0.44095930457115173,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.0829,
      "step": 2950
    },
    {
      "epoch": 11.428571428571429,
      "grad_norm": 0.7041499018669128,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.0959,
      "step": 2960
    },
    {
      "epoch": 11.467181467181467,
      "grad_norm": 0.7257633209228516,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.0875,
      "step": 2970
    },
    {
      "epoch": 11.505791505791505,
      "grad_norm": 0.5796602368354797,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.0986,
      "step": 2980
    },
    {
      "epoch": 11.544401544401545,
      "grad_norm": 0.6739242672920227,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.1102,
      "step": 2990
    },
    {
      "epoch": 11.583011583011583,
      "grad_norm": 0.6059641242027283,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.1094,
      "step": 3000
    },
    {
      "epoch": 11.621621621621621,
      "grad_norm": 0.9224318265914917,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0836,
      "step": 3010
    },
    {
      "epoch": 11.660231660231661,
      "grad_norm": 0.5493955016136169,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0835,
      "step": 3020
    },
    {
      "epoch": 11.698841698841699,
      "grad_norm": 0.5447977781295776,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0906,
      "step": 3030
    },
    {
      "epoch": 11.737451737451737,
      "grad_norm": 0.6024081707000732,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.0977,
      "step": 3040
    },
    {
      "epoch": 11.776061776061777,
      "grad_norm": 0.7188856601715088,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0939,
      "step": 3050
    },
    {
      "epoch": 11.814671814671815,
      "grad_norm": 0.4234011769294739,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.1001,
      "step": 3060
    },
    {
      "epoch": 11.853281853281853,
      "grad_norm": 0.47279033064842224,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.082,
      "step": 3070
    },
    {
      "epoch": 11.891891891891891,
      "grad_norm": 0.6048724055290222,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.1029,
      "step": 3080
    },
    {
      "epoch": 11.930501930501931,
      "grad_norm": 0.6834560036659241,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.1116,
      "step": 3090
    },
    {
      "epoch": 11.96911196911197,
      "grad_norm": 0.730119526386261,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0909,
      "step": 3100
    },
    {
      "epoch": 12.007722007722007,
      "grad_norm": 0.5458978414535522,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.0786,
      "step": 3110
    },
    {
      "epoch": 12.046332046332047,
      "grad_norm": 0.7734132409095764,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0772,
      "step": 3120
    },
    {
      "epoch": 12.084942084942085,
      "grad_norm": 0.6241917610168457,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0725,
      "step": 3130
    },
    {
      "epoch": 12.123552123552123,
      "grad_norm": 0.7433600425720215,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0903,
      "step": 3140
    },
    {
      "epoch": 12.162162162162161,
      "grad_norm": 0.511486291885376,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0969,
      "step": 3150
    },
    {
      "epoch": 12.200772200772201,
      "grad_norm": 0.5927137732505798,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.1011,
      "step": 3160
    },
    {
      "epoch": 12.23938223938224,
      "grad_norm": 0.3921918272972107,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0793,
      "step": 3170
    },
    {
      "epoch": 12.277992277992277,
      "grad_norm": 0.719854474067688,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.1059,
      "step": 3180
    },
    {
      "epoch": 12.316602316602317,
      "grad_norm": 0.5687322616577148,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.1051,
      "step": 3190
    },
    {
      "epoch": 12.355212355212355,
      "grad_norm": 0.6002088189125061,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0915,
      "step": 3200
    },
    {
      "epoch": 12.393822393822393,
      "grad_norm": 0.5544345378875732,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.087,
      "step": 3210
    },
    {
      "epoch": 12.432432432432432,
      "grad_norm": 3.415940999984741,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.1167,
      "step": 3220
    },
    {
      "epoch": 12.471042471042471,
      "grad_norm": 0.5576305985450745,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.1188,
      "step": 3230
    },
    {
      "epoch": 12.50965250965251,
      "grad_norm": 0.6188719868659973,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0834,
      "step": 3240
    },
    {
      "epoch": 12.548262548262548,
      "grad_norm": 0.4263285994529724,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.1006,
      "step": 3250
    },
    {
      "epoch": 12.586872586872587,
      "grad_norm": 0.6959123611450195,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.0959,
      "step": 3260
    },
    {
      "epoch": 12.625482625482626,
      "grad_norm": 0.5221771001815796,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0953,
      "step": 3270
    },
    {
      "epoch": 12.664092664092664,
      "grad_norm": 0.71525639295578,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0808,
      "step": 3280
    },
    {
      "epoch": 12.702702702702704,
      "grad_norm": 0.7279417514801025,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0867,
      "step": 3290
    },
    {
      "epoch": 12.741312741312742,
      "grad_norm": 0.5896912217140198,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0848,
      "step": 3300
    },
    {
      "epoch": 12.77992277992278,
      "grad_norm": 0.6318273544311523,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.0888,
      "step": 3310
    },
    {
      "epoch": 12.818532818532818,
      "grad_norm": 0.5931304693222046,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0892,
      "step": 3320
    },
    {
      "epoch": 12.857142857142858,
      "grad_norm": 0.5776562094688416,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0912,
      "step": 3330
    },
    {
      "epoch": 12.895752895752896,
      "grad_norm": 0.5498374104499817,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0942,
      "step": 3340
    },
    {
      "epoch": 12.934362934362934,
      "grad_norm": 0.4531612992286682,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0951,
      "step": 3350
    },
    {
      "epoch": 12.972972972972974,
      "grad_norm": 0.5652436017990112,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.083,
      "step": 3360
    },
    {
      "epoch": 13.011583011583012,
      "grad_norm": 0.5419857501983643,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.0894,
      "step": 3370
    },
    {
      "epoch": 13.05019305019305,
      "grad_norm": 0.5802552103996277,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.1176,
      "step": 3380
    },
    {
      "epoch": 13.088803088803088,
      "grad_norm": 0.6911759972572327,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0793,
      "step": 3390
    },
    {
      "epoch": 13.127413127413128,
      "grad_norm": 0.6647396087646484,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.0913,
      "step": 3400
    },
    {
      "epoch": 13.166023166023166,
      "grad_norm": 0.7876801490783691,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.1018,
      "step": 3410
    },
    {
      "epoch": 13.204633204633204,
      "grad_norm": 0.5604192018508911,
      "learning_rate": 7.845868941811956e-05,
      "loss": 0.0759,
      "step": 3420
    },
    {
      "epoch": 13.243243243243244,
      "grad_norm": 0.529370903968811,
      "learning_rate": 7.832258291619043e-05,
      "loss": 0.0933,
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 13.281853281853282, | |
| "grad_norm": 0.49177083373069763, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0975, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 13.32046332046332, | |
| "grad_norm": 0.46308794617652893, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0941, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 13.35907335907336, | |
| "grad_norm": 0.5650749802589417, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0805, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 13.397683397683398, | |
| "grad_norm": 0.36478251218795776, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.1041, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 13.436293436293436, | |
| "grad_norm": 0.43773508071899414, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.1053, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 13.474903474903474, | |
| "grad_norm": 0.6326601505279541, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.0999, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 13.513513513513514, | |
| "grad_norm": 0.7280349135398865, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0776, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 13.552123552123552, | |
| "grad_norm": 0.725627601146698, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0937, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 13.59073359073359, | |
| "grad_norm": 0.5294554829597473, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0859, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 13.62934362934363, | |
| "grad_norm": 0.5159903168678284, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.086, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 13.667953667953668, | |
| "grad_norm": 0.5003053545951843, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.086, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 13.706563706563706, | |
| "grad_norm": 0.5443400144577026, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0833, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 13.745173745173744, | |
| "grad_norm": 0.6605769395828247, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0746, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 13.783783783783784, | |
| "grad_norm": 0.5426715016365051, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0875, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 13.822393822393822, | |
| "grad_norm": 0.41429880261421204, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0948, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 13.86100386100386, | |
| "grad_norm": 0.5427739024162292, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0916, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 13.8996138996139, | |
| "grad_norm": 0.6028660535812378, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0882, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 13.938223938223938, | |
| "grad_norm": 0.6504780054092407, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0821, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 13.976833976833976, | |
| "grad_norm": 0.6405124068260193, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0805, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 14.015444015444016, | |
| "grad_norm": 0.7024744749069214, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0648, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 14.054054054054054, | |
| "grad_norm": 0.46351149678230286, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0798, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 14.092664092664092, | |
| "grad_norm": 0.5199838280677795, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0856, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 14.13127413127413, | |
| "grad_norm": 0.6727871298789978, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0904, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 14.16988416988417, | |
| "grad_norm": 0.5358639359474182, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.1062, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 14.208494208494209, | |
| "grad_norm": 0.6725749373435974, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0881, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 14.247104247104247, | |
| "grad_norm": 0.6883063912391663, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0736, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 14.285714285714286, | |
| "grad_norm": 0.6106855869293213, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0888, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 14.324324324324325, | |
| "grad_norm": 0.5512902140617371, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0883, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 14.362934362934363, | |
| "grad_norm": 0.6435045599937439, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0919, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 14.4015444015444, | |
| "grad_norm": 0.6004988551139832, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0677, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 14.44015444015444, | |
| "grad_norm": 0.5087930560112, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0679, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 14.478764478764479, | |
| "grad_norm": 0.44460296630859375, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0838, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 14.517374517374517, | |
| "grad_norm": 0.372995525598526, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0831, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 14.555984555984557, | |
| "grad_norm": 0.6550002694129944, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.1051, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 14.594594594594595, | |
| "grad_norm": 0.5978309512138367, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.096, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 14.633204633204633, | |
| "grad_norm": 0.3988819420337677, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0784, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 14.671814671814673, | |
| "grad_norm": 0.5307171940803528, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.1048, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 14.71042471042471, | |
| "grad_norm": 0.6089282631874084, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.073, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 14.749034749034749, | |
| "grad_norm": 0.6973853707313538, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0966, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 14.787644787644787, | |
| "grad_norm": 0.39602163434028625, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0708, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 14.826254826254827, | |
| "grad_norm": 0.5536203384399414, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0701, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 14.864864864864865, | |
| "grad_norm": 0.5625492930412292, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.1033, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 14.903474903474903, | |
| "grad_norm": 0.8248059749603271, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.0879, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 14.942084942084943, | |
| "grad_norm": 1.0776482820510864, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0788, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 14.980694980694981, | |
| "grad_norm": 0.47371265292167664, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0804, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 15.019305019305019, | |
| "grad_norm": 0.48720476031303406, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.09, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 15.057915057915057, | |
| "grad_norm": 0.5072534084320068, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0861, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 15.096525096525097, | |
| "grad_norm": 0.6805068850517273, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0815, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 15.135135135135135, | |
| "grad_norm": 0.6972246766090393, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0951, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 15.173745173745173, | |
| "grad_norm": 0.5297501087188721, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0898, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 15.212355212355213, | |
| "grad_norm": 0.4296838641166687, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0783, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 15.250965250965251, | |
| "grad_norm": 0.6048504114151001, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0843, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 15.28957528957529, | |
| "grad_norm": 0.45158666372299194, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0847, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 15.328185328185327, | |
| "grad_norm": 0.6273931860923767, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0751, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 15.366795366795367, | |
| "grad_norm": 0.4455814063549042, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0821, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 15.405405405405405, | |
| "grad_norm": 0.6072818636894226, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0887, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 15.444015444015443, | |
| "grad_norm": 0.7125831246376038, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0901, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 15.482625482625483, | |
| "grad_norm": 0.5886876583099365, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0716, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 15.521235521235521, | |
| "grad_norm": 0.4565495252609253, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0769, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 15.55984555984556, | |
| "grad_norm": 0.5533151030540466, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.086, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 15.5984555984556, | |
| "grad_norm": 0.46190527081489563, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0528, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 15.637065637065637, | |
| "grad_norm": 1.3326513767242432, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0868, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 15.675675675675675, | |
| "grad_norm": 0.6459556818008423, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0709, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 15.714285714285714, | |
| "grad_norm": 0.4545834958553314, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0609, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 15.752895752895753, | |
| "grad_norm": 0.4588858187198639, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0855, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 15.791505791505791, | |
| "grad_norm": 0.8000584840774536, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0841, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 15.83011583011583, | |
| "grad_norm": 0.6274726390838623, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0781, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 15.86872586872587, | |
| "grad_norm": 0.5860035419464111, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0789, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 15.907335907335908, | |
| "grad_norm": 0.804555356502533, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.1013, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 15.945945945945946, | |
| "grad_norm": 0.5548028945922852, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0874, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 15.984555984555985, | |
| "grad_norm": 0.43698981404304504, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0866, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 16.023166023166024, | |
| "grad_norm": 1.1259561777114868, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.089, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 16.06177606177606, | |
| "grad_norm": 0.44387349486351013, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0757, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 16.1003861003861, | |
| "grad_norm": 0.5084031224250793, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0756, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 16.138996138996138, | |
| "grad_norm": 0.37173858284950256, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.077, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 16.177606177606176, | |
| "grad_norm": 0.6248420476913452, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0953, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 16.216216216216218, | |
| "grad_norm": 0.3940771222114563, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0671, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 16.254826254826256, | |
| "grad_norm": 0.6416221857070923, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0827, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 16.293436293436294, | |
| "grad_norm": 0.623534619808197, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.1018, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 16.332046332046332, | |
| "grad_norm": 0.6577959060668945, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.116, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 16.37065637065637, | |
| "grad_norm": 0.5343630909919739, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0923, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 16.409266409266408, | |
| "grad_norm": 0.43712395429611206, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0853, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 16.447876447876446, | |
| "grad_norm": 0.5086554884910583, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0849, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 16.486486486486488, | |
| "grad_norm": 0.5114151239395142, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0789, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 16.525096525096526, | |
| "grad_norm": 0.6828848123550415, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0769, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 16.563706563706564, | |
| "grad_norm": 0.7185290455818176, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0829, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 16.602316602316602, | |
| "grad_norm": 0.5070241093635559, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0558, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 16.64092664092664, | |
| "grad_norm": 0.38561224937438965, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0823, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 16.679536679536678, | |
| "grad_norm": 0.7504719495773315, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0918, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 16.71814671814672, | |
| "grad_norm": 0.4384387135505676, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.071, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 16.756756756756758, | |
| "grad_norm": 0.5349385738372803, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0596, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 16.795366795366796, | |
| "grad_norm": 0.5113857388496399, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0576, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 16.833976833976834, | |
| "grad_norm": 0.5070815086364746, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0739, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 16.872586872586872, | |
| "grad_norm": 0.4332325756549835, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0808, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 16.91119691119691, | |
| "grad_norm": 0.4109137952327728, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0822, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 16.94980694980695, | |
| "grad_norm": 0.5654485821723938, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0659, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 16.98841698841699, | |
| "grad_norm": 0.5623957514762878, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0886, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 17.027027027027028, | |
| "grad_norm": 0.5168842077255249, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0737, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 17.065637065637066, | |
| "grad_norm": 0.4881226122379303, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0983, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 17.104247104247104, | |
| "grad_norm": 0.5036429166793823, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0842, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 17.142857142857142, | |
| "grad_norm": 0.5281261205673218, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0827, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 17.18146718146718, | |
| "grad_norm": 0.6240306496620178, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.1004, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 17.22007722007722, | |
| "grad_norm": 0.5073460340499878, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0693, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 17.25868725868726, | |
| "grad_norm": 0.4580148756504059, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0734, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 17.2972972972973, | |
| "grad_norm": 0.6077268719673157, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0826, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 17.335907335907336, | |
| "grad_norm": 0.5811083316802979, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0883, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 17.374517374517374, | |
| "grad_norm": 0.6039600372314453, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0817, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 17.413127413127413, | |
| "grad_norm": 0.3778926730155945, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0911, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 17.45173745173745, | |
| "grad_norm": 0.5251861810684204, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0708, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 17.49034749034749, | |
| "grad_norm": 0.5521923303604126, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0656, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 17.52895752895753, | |
| "grad_norm": 0.598902702331543, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.1051, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 17.56756756756757, | |
| "grad_norm": 0.3929014205932617, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0784, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 17.606177606177607, | |
| "grad_norm": 0.5811739563941956, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.06, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 17.644787644787645, | |
| "grad_norm": 0.6186365485191345, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0916, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 17.683397683397683, | |
| "grad_norm": 0.6466165781021118, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0634, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 17.72200772200772, | |
| "grad_norm": 0.6939656734466553, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0772, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 17.760617760617762, | |
| "grad_norm": 0.6492602825164795, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0697, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 17.7992277992278, | |
| "grad_norm": 0.6554388403892517, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0785, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 17.83783783783784, | |
| "grad_norm": 0.5153865814208984, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0619, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 17.876447876447877, | |
| "grad_norm": 0.7237990498542786, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.082, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 17.915057915057915, | |
| "grad_norm": 0.4947359561920166, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0818, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 17.953667953667953, | |
| "grad_norm": 0.5115700364112854, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0679, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 17.99227799227799, | |
| "grad_norm": 0.5944989919662476, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0772, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 18.030888030888033, | |
| "grad_norm": 0.5000519752502441, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0689, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 18.06949806949807, | |
| "grad_norm": 0.46222320199012756, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0767, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 18.10810810810811, | |
| "grad_norm": 0.5670591592788696, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0865, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 18.146718146718147, | |
| "grad_norm": 0.6035292148590088, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0805, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 18.185328185328185, | |
| "grad_norm": 0.6038900017738342, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0782, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 18.223938223938223, | |
| "grad_norm": 0.61341392993927, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0634, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 18.26254826254826, | |
| "grad_norm": 0.3309182822704315, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.074, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 18.301158301158303, | |
| "grad_norm": 0.3919869065284729, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0717, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 18.33976833976834, | |
| "grad_norm": 0.47341981530189514, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0634, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 18.37837837837838, | |
| "grad_norm": 0.6534469127655029, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0782, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 18.416988416988417, | |
| "grad_norm": 0.567298412322998, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0773, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 18.455598455598455, | |
| "grad_norm": 0.4733419120311737, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0843, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 18.494208494208493, | |
| "grad_norm": 0.646246075630188, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0643, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 18.53281853281853, | |
| "grad_norm": 0.596415638923645, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0781, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 18.571428571428573, | |
| "grad_norm": 0.7743390798568726, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0666, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 18.61003861003861, | |
| "grad_norm": 0.5262377858161926, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.075, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 18.64864864864865, | |
| "grad_norm": 0.5140449404716492, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0697, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 18.687258687258687, | |
| "grad_norm": 0.5163661241531372, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0625, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 18.725868725868725, | |
| "grad_norm": 0.6141701340675354, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0698, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 18.764478764478763, | |
| "grad_norm": 0.627004086971283, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0564, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 18.8030888030888, | |
| "grad_norm": 0.522399365901947, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0681, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 18.841698841698843, | |
| "grad_norm": 0.4054034948348999, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0725, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 18.88030888030888, | |
| "grad_norm": 0.5233802795410156, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.091, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 18.91891891891892, | |
| "grad_norm": 0.6394193172454834, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0563, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 18.957528957528957, | |
| "grad_norm": 0.48193085193634033, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0692, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 18.996138996138995, | |
| "grad_norm": 0.5722125768661499, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0779, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 19.034749034749034, | |
| "grad_norm": 0.5283990502357483, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0634, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 19.07335907335907, | |
| "grad_norm": 0.3909274935722351, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0513, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 19.111969111969113, | |
| "grad_norm": 0.44363778829574585, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0715, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 19.15057915057915, | |
| "grad_norm": 0.750079333782196, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.075, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 19.18918918918919, | |
| "grad_norm": 0.42553237080574036, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0837, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 19.227799227799228, | |
| "grad_norm": 0.5493687987327576, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0748, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 19.266409266409266, | |
| "grad_norm": 0.4793163239955902, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0763, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 19.305019305019304, | |
| "grad_norm": 0.4897761344909668, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0827, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 19.343629343629345, | |
| "grad_norm": 0.6924915313720703, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0819, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 19.382239382239383, | |
| "grad_norm": 0.69071364402771, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0736, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 19.42084942084942, | |
| "grad_norm": 0.7378138303756714, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0587, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 19.45945945945946, | |
| "grad_norm": 0.7582262754440308, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0649, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 19.498069498069498, | |
| "grad_norm": 0.5765745639801025, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0731, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 19.536679536679536, | |
| "grad_norm": 0.47059738636016846, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0477, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 19.575289575289574, | |
| "grad_norm": 0.6599562168121338, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0912, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 19.613899613899616, | |
| "grad_norm": 0.4605643153190613, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0718, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 19.652509652509654, | |
| "grad_norm": 0.5245002508163452, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0598, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 19.69111969111969, | |
| "grad_norm": 0.4711160361766815, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0575, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 19.72972972972973, | |
| "grad_norm": 0.4068230986595154, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0659, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 19.768339768339768, | |
| "grad_norm": 0.40332168340682983, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0796, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 19.806949806949806, | |
| "grad_norm": 0.5021756887435913, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0699, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 19.845559845559844, | |
| "grad_norm": 0.6182597279548645, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0712, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 19.884169884169886, | |
| "grad_norm": 0.499675452709198, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0626, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 19.922779922779924, | |
| "grad_norm": 0.5602287650108337, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0869, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 19.961389961389962, | |
| "grad_norm": 0.5517196655273438, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0715, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "grad_norm": 0.3541000485420227, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0529, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 20.038610038610038, | |
| "grad_norm": 0.40408554673194885, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0846, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 20.077220077220076, | |
| "grad_norm": 0.49958834052085876, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0501, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 20.115830115830114, | |
| "grad_norm": 0.4283312261104584, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0738, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 20.154440154440156, | |
| "grad_norm": 0.6037362813949585, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0698, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 20.193050193050194, | |
| "grad_norm": 0.4373033046722412, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0613, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 20.231660231660232, | |
| "grad_norm": 0.3218035101890564, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0603, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 20.27027027027027, | |
| "grad_norm": 0.6302839517593384, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.082, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 20.30888030888031, | |
| "grad_norm": 0.580318033695221, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0647, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 20.347490347490346, | |
| "grad_norm": 0.7693837881088257, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.073, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 20.386100386100384, | |
| "grad_norm": 0.437863826751709, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0761, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 20.424710424710426, | |
| "grad_norm": 0.5829715728759766, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0636, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 20.463320463320464, | |
| "grad_norm": 0.652866780757904, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0543, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 20.501930501930502, | |
| "grad_norm": 0.6355207562446594, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.0552, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 20.54054054054054, | |
| "grad_norm": 0.6155616641044617, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0681, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 20.57915057915058, | |
| "grad_norm": 0.5970525145530701, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0639, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 20.617760617760617, | |
| "grad_norm": 0.3544915020465851, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0583, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 20.656370656370655, | |
| "grad_norm": 0.3478948175907135, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0491, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 20.694980694980696, | |
| "grad_norm": 0.42770978808403015, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0635, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 20.733590733590734, | |
| "grad_norm": 0.49829915165901184, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0494, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 20.772200772200772, | |
| "grad_norm": 0.6188846826553345, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0536, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 20.81081081081081, | |
| "grad_norm": 0.49089109897613525, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.058, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 20.84942084942085, | |
| "grad_norm": 0.5045559406280518, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0693, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 20.888030888030887, | |
| "grad_norm": 0.4698314964771271, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0632, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 20.92664092664093, | |
| "grad_norm": 0.37180498242378235, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0576, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 20.965250965250966, | |
| "grad_norm": 0.5959806442260742, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0604, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 21.003861003861005, | |
| "grad_norm": 0.3700093626976013, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0819, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 21.042471042471043, | |
| "grad_norm": 0.5958386659622192, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0427, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 21.08108108108108, | |
| "grad_norm": 0.47188055515289307, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0518, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 21.11969111969112, | |
| "grad_norm": 0.3665406107902527, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0557, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 21.158301158301157, | |
| "grad_norm": 0.49395236372947693, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0634, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 21.1969111969112, | |
| "grad_norm": 0.46031197905540466, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0617, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 21.235521235521237, | |
| "grad_norm": 0.46037164330482483, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0635, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 21.274131274131275, | |
| "grad_norm": 0.604911744594574, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0651, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 21.312741312741313, | |
| "grad_norm": 0.4139896333217621, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0607, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 21.35135135135135, | |
| "grad_norm": 0.3025991916656494, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.062, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 21.38996138996139, | |
| "grad_norm": 0.32330894470214844, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0513, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 21.428571428571427, | |
| "grad_norm": 0.5037228465080261, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0568, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 21.46718146718147, | |
| "grad_norm": 0.5318952798843384, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0523, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 21.505791505791507, | |
| "grad_norm": 0.6271405816078186, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0713, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 21.544401544401545, | |
| "grad_norm": 0.5892740488052368, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0797, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 21.583011583011583, | |
| "grad_norm": 0.516455352306366, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0577, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 21.62162162162162, | |
| "grad_norm": 0.5120159983634949, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0648, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 21.66023166023166, | |
| "grad_norm": 0.37961456179618835, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0609, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 21.698841698841697, | |
| "grad_norm": 0.48860493302345276, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0488, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 21.73745173745174, | |
| "grad_norm": 0.24185198545455933, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0523, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 21.776061776061777, | |
| "grad_norm": 0.5195246934890747, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0579, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 21.814671814671815, | |
| "grad_norm": 0.2884815037250519, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0711, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 21.853281853281853, | |
| "grad_norm": 0.507343053817749, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0621, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 21.89189189189189, | |
| "grad_norm": 0.5438768863677979, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0608, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 21.93050193050193, | |
| "grad_norm": 0.4898863434791565, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0642, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 21.969111969111967, | |
| "grad_norm": 0.4932737946510315, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0634, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 22.00772200772201, | |
| "grad_norm": 0.32997044920921326, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0469, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 22.046332046332047, | |
| "grad_norm": 0.338538259267807, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0617, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 22.084942084942085, | |
| "grad_norm": 0.49940410256385803, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0522, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 22.123552123552123, | |
| "grad_norm": 0.4403421878814697, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0621, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 22.16216216216216, | |
| "grad_norm": 0.38823920488357544, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0732, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 22.2007722007722, | |
| "grad_norm": 0.36218559741973877, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0517, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 22.23938223938224, | |
| "grad_norm": 0.34942883253097534, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.058, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 22.27799227799228, | |
| "grad_norm": 0.505549967288971, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0633, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 22.316602316602317, | |
| "grad_norm": 0.5539968013763428, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0809, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 22.355212355212355, | |
| "grad_norm": 0.5466805100440979, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0529, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 22.393822393822393, | |
| "grad_norm": 0.38340243697166443, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0687, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 22.43243243243243, | |
| "grad_norm": 0.39504724740982056, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0521, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 22.47104247104247, | |
| "grad_norm": 0.37831607460975647, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0546, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 22.50965250965251, | |
| "grad_norm": 0.5784722566604614, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0693, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 22.54826254826255, | |
| "grad_norm": 0.4978543221950531, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0557, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 22.586872586872587, | |
| "grad_norm": 0.3099800646305084, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0458, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 22.625482625482626, | |
| "grad_norm": 0.6736605167388916, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0687, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 22.664092664092664, | |
| "grad_norm": 0.45186126232147217, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0646, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 22.7027027027027, | |
| "grad_norm": 0.5107584595680237, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0666, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 22.74131274131274, | |
| "grad_norm": 0.45037683844566345, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0509, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 22.77992277992278, | |
| "grad_norm": 0.3888896405696869, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0731, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 22.81853281853282, | |
| "grad_norm": 0.3771021366119385, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0702, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 22.857142857142858, | |
| "grad_norm": 0.5772953033447266, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0602, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 22.895752895752896, | |
| "grad_norm": 0.40094444155693054, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0582, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 22.934362934362934, | |
| "grad_norm": 0.3225739598274231, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0542, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 22.972972972972972, | |
| "grad_norm": 0.7332587242126465, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0597, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 23.01158301158301, | |
| "grad_norm": 0.44024458527565, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0497, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 23.05019305019305, | |
| "grad_norm": 0.4732349216938019, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0578, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 23.08880308880309, | |
| "grad_norm": 0.4735555350780487, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0615, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 23.127413127413128, | |
| "grad_norm": 0.6857978105545044, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0718, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 23.166023166023166, | |
| "grad_norm": 0.3587701916694641, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.085, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 23.204633204633204, | |
| "grad_norm": 0.40470054745674133, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0608, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 23.243243243243242, | |
| "grad_norm": 0.5807958841323853, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0606, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 23.28185328185328, | |
| "grad_norm": 0.3811424672603607, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0594, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 23.320463320463322, | |
| "grad_norm": 0.44823238253593445, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0624, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 23.35907335907336, | |
| "grad_norm": 0.5011805295944214, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.069, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 23.397683397683398, | |
| "grad_norm": 0.5222461223602295, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0462, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 23.436293436293436, | |
| "grad_norm": 0.41890570521354675, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0552, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 23.474903474903474, | |
| "grad_norm": 0.6486966609954834, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0627, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 23.513513513513512, | |
| "grad_norm": 0.46851104497909546, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0536, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 23.552123552123554, | |
| "grad_norm": 0.5276613831520081, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0666, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 23.590733590733592, | |
| "grad_norm": 0.341413289308548, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0417, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 23.62934362934363, | |
| "grad_norm": 0.5024414658546448, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0511, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 23.667953667953668, | |
| "grad_norm": 0.27348798513412476, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0405, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 23.706563706563706, | |
| "grad_norm": 0.43044814467430115, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0548, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 23.745173745173744, | |
| "grad_norm": 0.28613966703414917, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0755, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 23.783783783783782, | |
| "grad_norm": 0.5436369180679321, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.071, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 23.822393822393824, | |
| "grad_norm": 0.5096984505653381, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0622, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 23.861003861003862, | |
| "grad_norm": 0.49167925119400024, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0551, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 23.8996138996139, | |
| "grad_norm": 0.35319849848747253, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0672, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 23.93822393822394, | |
| "grad_norm": 0.2771577537059784, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0439, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 23.976833976833976, | |
| "grad_norm": 0.49233347177505493, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0633, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 24.015444015444015, | |
| "grad_norm": 0.5419521927833557, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0461, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 24.054054054054053, | |
| "grad_norm": 0.37961897253990173, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0504, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 24.092664092664094, | |
| "grad_norm": 0.5190012454986572, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0507, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 24.131274131274132, | |
| "grad_norm": 0.38911232352256775, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0677, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 24.16988416988417, | |
| "grad_norm": 0.3622760474681854, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0525, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 24.20849420849421, | |
| "grad_norm": 0.5027076601982117, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.05, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 24.247104247104247, | |
| "grad_norm": 0.4265000820159912, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0543, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 24.285714285714285, | |
| "grad_norm": 0.34943899512290955, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0539, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 24.324324324324323, | |
| "grad_norm": 0.6790931224822998, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.06, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 24.362934362934364, | |
| "grad_norm": 0.45015794038772583, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0461, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 24.401544401544403, | |
| "grad_norm": 0.5135745406150818, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0569, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 24.44015444015444, | |
| "grad_norm": 0.6036981344223022, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.05, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 24.47876447876448, | |
| "grad_norm": 0.5034620761871338, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0585, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 24.517374517374517, | |
| "grad_norm": 0.4371446967124939, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0637, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 24.555984555984555, | |
| "grad_norm": 0.40289029479026794, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0559, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 24.594594594594593, | |
| "grad_norm": 0.5056316256523132, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0456, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 24.633204633204635, | |
| "grad_norm": 0.5891019701957703, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0464, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 24.671814671814673, | |
| "grad_norm": 0.6418840885162354, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.057, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 24.71042471042471, | |
| "grad_norm": 0.34795257449150085, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0554, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 24.74903474903475, | |
| "grad_norm": 0.43306779861450195, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0379, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 24.787644787644787, | |
| "grad_norm": 0.6308071613311768, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0525, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 24.826254826254825, | |
| "grad_norm": 0.37014514207839966, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0462, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 24.864864864864863, | |
| "grad_norm": 0.45822596549987793, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0506, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 24.903474903474905, | |
| "grad_norm": 0.505630612373352, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.047, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 24.942084942084943, | |
| "grad_norm": 0.39040684700012207, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0436, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 24.98069498069498, | |
| "grad_norm": 0.42719799280166626, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0472, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 25.01930501930502, | |
| "grad_norm": 0.3602113425731659, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0374, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 25.057915057915057, | |
| "grad_norm": 0.6470438838005066, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0607, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 25.096525096525095, | |
| "grad_norm": 0.5232688188552856, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0421, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 25.135135135135137, | |
| "grad_norm": 0.34110620617866516, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0414, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 25.173745173745175, | |
| "grad_norm": 0.5356219410896301, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0556, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 25.212355212355213, | |
| "grad_norm": 0.4498227834701538, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.05, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 25.25096525096525, | |
| "grad_norm": 0.41610923409461975, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0557, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 25.28957528957529, | |
| "grad_norm": 0.5128653645515442, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0476, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 25.328185328185327, | |
| "grad_norm": 0.5739153027534485, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0609, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 25.366795366795365, | |
| "grad_norm": 0.5832862854003906, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0524, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 25.405405405405407, | |
| "grad_norm": 0.7269383072853088, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0458, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 25.444015444015445, | |
| "grad_norm": 0.44224220514297485, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0604, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 25.482625482625483, | |
| "grad_norm": 0.6164863705635071, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0621, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 25.52123552123552, | |
| "grad_norm": 0.6670511364936829, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0456, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 25.55984555984556, | |
| "grad_norm": 0.3655743896961212, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0463, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 25.598455598455597, | |
| "grad_norm": 0.6558657288551331, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0405, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 25.637065637065636, | |
| "grad_norm": 0.5198159217834473, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0389, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 25.675675675675677, | |
| "grad_norm": 0.4935913383960724, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0693, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 25.714285714285715, | |
| "grad_norm": 0.4455951750278473, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0338, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 25.752895752895753, | |
| "grad_norm": 0.33703893423080444, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0427, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 25.79150579150579, | |
| "grad_norm": 0.41062918305397034, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0548, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 25.83011583011583, | |
| "grad_norm": 0.42094191908836365, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.059, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 25.868725868725868, | |
| "grad_norm": 0.635362446308136, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0473, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 25.907335907335906, | |
| "grad_norm": 0.5083866715431213, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0527, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 25.945945945945947, | |
| "grad_norm": 0.4608486592769623, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0644, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 25.984555984555985, | |
| "grad_norm": 0.5259851813316345, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0507, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 26.023166023166024, | |
| "grad_norm": 0.35150691866874695, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0622, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 26.06177606177606, | |
| "grad_norm": 0.4495721459388733, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0443, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 26.1003861003861, | |
| "grad_norm": 0.29612305760383606, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0425, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 26.138996138996138, | |
| "grad_norm": 0.37452226877212524, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.043, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 26.177606177606176, | |
| "grad_norm": 0.5638911128044128, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0727, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 26.216216216216218, | |
| "grad_norm": 0.3986194133758545, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0346, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 26.254826254826256, | |
| "grad_norm": 1.514148473739624, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0596, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 26.293436293436294, | |
| "grad_norm": 0.2974415123462677, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0349, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 26.332046332046332, | |
| "grad_norm": 0.4633919894695282, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0632, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 26.37065637065637, | |
| "grad_norm": 0.5830386877059937, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0486, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 26.409266409266408, | |
| "grad_norm": 0.5291124582290649, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0613, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 26.447876447876446, | |
| "grad_norm": 0.3293599784374237, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0446, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 26.486486486486488, | |
| "grad_norm": 0.4320952892303467, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0363, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 26.525096525096526, | |
| "grad_norm": 0.4750666916370392, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0655, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 26.563706563706564, | |
| "grad_norm": 0.47334954142570496, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0547, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 26.602316602316602, | |
| "grad_norm": 0.35568034648895264, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0424, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 26.64092664092664, | |
| "grad_norm": 0.5133526921272278, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0598, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 26.679536679536678, | |
| "grad_norm": 0.5015846490859985, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0381, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 26.71814671814672, | |
| "grad_norm": 0.5141738057136536, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0587, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 26.756756756756758, | |
| "grad_norm": 0.399676114320755, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0483, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 26.795366795366796, | |
| "grad_norm": 0.6744953989982605, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0374, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 26.833976833976834, | |
| "grad_norm": 0.44455859065055847, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0381, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 26.872586872586872, | |
| "grad_norm": 0.4645119905471802, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0485, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 26.91119691119691, | |
| "grad_norm": 0.35686275362968445, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0501, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 26.94980694980695, | |
| "grad_norm": 0.6142265200614929, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.051, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 26.98841698841699, | |
| "grad_norm": 0.5292761325836182, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0373, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 27.027027027027028, | |
| "grad_norm": 0.45108911395072937, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0453, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 27.065637065637066, | |
| "grad_norm": 0.4141083061695099, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0583, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 27.104247104247104, | |
| "grad_norm": 0.4025471806526184, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0479, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 27.142857142857142, | |
| "grad_norm": 0.5654422640800476, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0686, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 27.18146718146718, | |
| "grad_norm": 0.2999004125595093, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0506, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 27.22007722007722, | |
| "grad_norm": 0.5117769837379456, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.037, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 27.25868725868726, | |
| "grad_norm": 0.39674946665763855, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0416, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 27.2972972972973, | |
| "grad_norm": 0.514123260974884, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0497, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 27.335907335907336, | |
| "grad_norm": 0.49236828088760376, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0759, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 27.374517374517374, | |
| "grad_norm": 0.7245543003082275, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0586, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 27.413127413127413, | |
| "grad_norm": 0.6243254542350769, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.033, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 27.45173745173745, | |
| "grad_norm": 0.389565646648407, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0678, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 27.49034749034749, | |
| "grad_norm": 0.5558807849884033, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0341, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 27.52895752895753, | |
| "grad_norm": 0.5637540817260742, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.043, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 27.56756756756757, | |
| "grad_norm": 0.4027647078037262, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0668, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 27.606177606177607, | |
| "grad_norm": 0.42507487535476685, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0532, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 27.644787644787645, | |
| "grad_norm": 0.5454823970794678, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0564, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 27.683397683397683, | |
| "grad_norm": 0.5077855587005615, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0483, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 27.72200772200772, | |
| "grad_norm": 0.7553500533103943, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0396, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 27.760617760617762, | |
| "grad_norm": 0.4641231596469879, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0472, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 27.7992277992278, | |
| "grad_norm": 0.5995585918426514, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0478, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 27.83783783783784, | |
| "grad_norm": 0.370569109916687, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0475, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 27.876447876447877, | |
| "grad_norm": 0.8217543363571167, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0509, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 27.915057915057915, | |
| "grad_norm": 0.38071209192276, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0393, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 27.953667953667953, | |
| "grad_norm": 0.7309401631355286, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0496, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 27.99227799227799, | |
| "grad_norm": 0.46595320105552673, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0529, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 28.030888030888033, | |
| "grad_norm": 0.5158835053443909, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.036, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 28.06949806949807, | |
| "grad_norm": 0.2889738976955414, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.031, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 28.10810810810811, | |
| "grad_norm": 0.5699809789657593, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0409, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 28.146718146718147, | |
| "grad_norm": 0.30492979288101196, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0533, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 28.185328185328185, | |
| "grad_norm": 0.858198344707489, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0536, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 28.223938223938223, | |
| "grad_norm": 0.531909704208374, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0424, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 28.26254826254826, | |
| "grad_norm": 1.3592851161956787, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0361, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 28.301158301158303, | |
| "grad_norm": 0.42610982060432434, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0452, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 28.33976833976834, | |
| "grad_norm": 0.33658021688461304, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0461, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 28.37837837837838, | |
| "grad_norm": 0.506962239742279, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0381, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 28.416988416988417, | |
| "grad_norm": 0.4352479577064514, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0382, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 28.455598455598455, | |
| "grad_norm": 0.4303007125854492, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0482, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 28.494208494208493, | |
| "grad_norm": 0.40790390968322754, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0411, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 28.53281853281853, | |
| "grad_norm": 0.4553315341472626, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0485, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 28.571428571428573, | |
| "grad_norm": 0.47120049595832825, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0433, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 28.61003861003861, | |
| "grad_norm": 0.5280634164810181, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0438, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 28.64864864864865, | |
| "grad_norm": 0.43494147062301636, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0649, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 28.687258687258687, | |
| "grad_norm": 0.31125617027282715, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.049, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 28.725868725868725, | |
| "grad_norm": 0.49624380469322205, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0387, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 28.764478764478763, | |
| "grad_norm": 0.641543447971344, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0433, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 28.8030888030888, | |
| "grad_norm": 0.3550838530063629, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0528, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 28.841698841698843, | |
| "grad_norm": 0.44131937623023987, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0436, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 28.88030888030888, | |
| "grad_norm": 1.983064889907837, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.057, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 28.91891891891892, | |
| "grad_norm": 0.5690191984176636, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0486, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 28.957528957528957, | |
| "grad_norm": 0.37668266892433167, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0331, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 28.996138996138995, | |
| "grad_norm": 0.5062463283538818, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.034, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 29.034749034749034, | |
| "grad_norm": 0.26328518986701965, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0299, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 29.07335907335907, | |
| "grad_norm": 0.38508641719818115, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0351, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 29.111969111969113, | |
| "grad_norm": 0.3345756232738495, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.043, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 29.15057915057915, | |
| "grad_norm": 0.505284309387207, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0424, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 29.18918918918919, | |
| "grad_norm": 0.5906921029090881, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0454, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 29.227799227799228, | |
| "grad_norm": 0.44763216376304626, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0478, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 29.266409266409266, | |
| "grad_norm": 0.581728994846344, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0576, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 29.305019305019304, | |
| "grad_norm": 0.6709556579589844, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0459, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 29.343629343629345, | |
| "grad_norm": 0.4280284345149994, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0564, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 29.382239382239383, | |
| "grad_norm": 0.3624938130378723, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0525, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 29.42084942084942, | |
| "grad_norm": 0.5082321763038635, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0408, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 29.45945945945946, | |
| "grad_norm": 0.469899445772171, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0497, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 29.498069498069498, | |
| "grad_norm": 0.6883857846260071, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0433, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 29.536679536679536, | |
| "grad_norm": 0.5980105996131897, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0352, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 29.575289575289574, | |
| "grad_norm": 0.5563459992408752, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0325, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 29.613899613899616, | |
| "grad_norm": 0.38310402631759644, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0433, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 29.652509652509654, | |
| "grad_norm": 0.6001343727111816, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0499, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 29.69111969111969, | |
| "grad_norm": 0.36467060446739197, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0367, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 29.72972972972973, | |
| "grad_norm": 0.44829538464546204, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0318, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 29.768339768339768, | |
| "grad_norm": 0.563353419303894, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.048, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 29.806949806949806, | |
| "grad_norm": 0.3578076958656311, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0485, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 29.845559845559844, | |
| "grad_norm": 0.4575956165790558, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0439, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 29.884169884169886, | |
| "grad_norm": 0.549997866153717, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0443, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 29.922779922779924, | |
| "grad_norm": 0.7762444019317627, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.034, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 29.961389961389962, | |
| "grad_norm": 0.4942251741886139, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.047, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "grad_norm": 0.3711082935333252, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0515, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 30.038610038610038, | |
| "grad_norm": 0.5000542402267456, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0403, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 30.077220077220076, | |
| "grad_norm": 0.675679087638855, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0471, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 30.115830115830114, | |
| "grad_norm": 0.5804657936096191, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0434, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 30.154440154440156, | |
| "grad_norm": 0.40390142798423767, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0469, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 30.193050193050194, | |
| "grad_norm": 0.2912136912345886, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0293, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 30.231660231660232, | |
| "grad_norm": 0.49417203664779663, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0398, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 30.27027027027027, | |
| "grad_norm": 0.4873831570148468, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0309, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 30.30888030888031, | |
| "grad_norm": 0.640707790851593, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0428, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 30.347490347490346, | |
| "grad_norm": 0.44811660051345825, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0469, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 30.386100386100384, | |
| "grad_norm": 0.4043349027633667, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0499, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 30.424710424710426, | |
| "grad_norm": 0.4621827006340027, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0462, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 30.463320463320464, | |
| "grad_norm": 0.35089004039764404, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0421, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 30.501930501930502, | |
| "grad_norm": 0.3911316394805908, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0518, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 30.54054054054054, | |
| "grad_norm": 0.24839645624160767, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0502, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 30.57915057915058, | |
| "grad_norm": 0.4422091245651245, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0579, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 30.617760617760617, | |
| "grad_norm": 0.40578359365463257, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0391, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 30.656370656370655, | |
| "grad_norm": 0.47507625818252563, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0458, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 30.694980694980696, | |
| "grad_norm": 0.3613755702972412, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0318, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 30.733590733590734, | |
| "grad_norm": 0.3082832992076874, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.036, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 30.772200772200772, | |
| "grad_norm": 0.4993591904640198, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0312, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 30.81081081081081, | |
| "grad_norm": 0.4567050635814667, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0366, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 30.84942084942085, | |
| "grad_norm": 0.36421293020248413, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.034, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 30.888030888030887, | |
| "grad_norm": 0.7462463974952698, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0332, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 30.92664092664093, | |
| "grad_norm": 0.4237750470638275, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0333, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 30.965250965250966, | |
| "grad_norm": 0.7147374153137207, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0452, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 31.003861003861005, | |
| "grad_norm": 0.23559176921844482, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0364, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 31.042471042471043, | |
| "grad_norm": 0.5721344351768494, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0343, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 31.08108108108108, | |
| "grad_norm": 0.38582655787467957, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0451, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 31.11969111969112, | |
| "grad_norm": 0.19999687373638153, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0284, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 31.158301158301157, | |
| "grad_norm": 0.5094717144966125, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0356, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 31.1969111969112, | |
| "grad_norm": 0.46238014101982117, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0508, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 31.235521235521237, | |
| "grad_norm": 0.3049578070640564, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0372, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 31.274131274131275, | |
| "grad_norm": 0.5652112364768982, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0449, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 31.312741312741313, | |
| "grad_norm": 0.47629275918006897, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0424, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 31.35135135135135, | |
| "grad_norm": 0.34560078382492065, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0379, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 31.38996138996139, | |
| "grad_norm": 0.4096907079219818, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0566, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 31.428571428571427, | |
| "grad_norm": 0.4206790328025818, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0416, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 31.46718146718147, | |
| "grad_norm": 0.4509584307670593, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0364, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 31.505791505791507, | |
| "grad_norm": 0.35847026109695435, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0466, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 31.544401544401545, | |
| "grad_norm": 0.21652787923812866, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0418, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 31.583011583011583, | |
| "grad_norm": 0.504115879535675, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0342, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 31.62162162162162, | |
| "grad_norm": 0.5375120639801025, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0432, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 31.66023166023166, | |
| "grad_norm": 0.5640621185302734, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0351, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 31.698841698841697, | |
| "grad_norm": 0.3777157664299011, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0268, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 31.73745173745174, | |
| "grad_norm": 0.2588464021682739, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0364, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 31.776061776061777, | |
| "grad_norm": 0.3287317454814911, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0356, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 31.814671814671815, | |
| "grad_norm": 0.2392064929008484, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0352, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 31.853281853281853, | |
| "grad_norm": 0.3696592152118683, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0428, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 31.89189189189189, | |
| "grad_norm": 0.4237655997276306, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0279, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 31.93050193050193, | |
| "grad_norm": 0.4059978127479553, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0284, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 31.969111969111967, | |
| "grad_norm": 0.5361632704734802, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0389, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 32.00772200772201, | |
| "grad_norm": 0.3683216869831085, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0284, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 32.04633204633205, | |
| "grad_norm": 0.41536685824394226, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0414, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 32.084942084942085, | |
| "grad_norm": 0.6054041385650635, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0345, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 32.12355212355212, | |
| "grad_norm": 0.4750712513923645, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0252, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 32.16216216216216, | |
| "grad_norm": 0.6487170457839966, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0362, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 32.2007722007722, | |
| "grad_norm": 0.2815876305103302, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0318, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 32.23938223938224, | |
| "grad_norm": 0.4896615743637085, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0391, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 32.277992277992276, | |
| "grad_norm": 0.36150723695755005, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0273, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 32.316602316602314, | |
| "grad_norm": 0.39038848876953125, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0302, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 32.35521235521235, | |
| "grad_norm": 0.5400213003158569, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0436, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 32.3938223938224, | |
| "grad_norm": 0.41952595114707947, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0313, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 32.432432432432435, | |
| "grad_norm": 0.4260327219963074, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0305, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 32.47104247104247, | |
| "grad_norm": 0.6441579461097717, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0314, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 32.50965250965251, | |
| "grad_norm": 0.3133586049079895, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0292, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 32.54826254826255, | |
| "grad_norm": 0.49595722556114197, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0358, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 32.58687258687259, | |
| "grad_norm": 0.3354673385620117, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0313, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 32.625482625482626, | |
| "grad_norm": 0.5179545879364014, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.036, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 32.664092664092664, | |
| "grad_norm": 0.36871305108070374, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0316, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 32.7027027027027, | |
| "grad_norm": 0.324504017829895, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.03, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 32.74131274131274, | |
| "grad_norm": 0.4725330173969269, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.029, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 32.77992277992278, | |
| "grad_norm": 0.5182196497917175, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0325, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 32.818532818532816, | |
| "grad_norm": 0.5111414194107056, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0534, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 32.857142857142854, | |
| "grad_norm": 0.8664683103561401, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0393, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 32.89575289575289, | |
| "grad_norm": 0.4252089262008667, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0268, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 32.93436293436294, | |
| "grad_norm": 0.3136313855648041, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0379, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 32.972972972972975, | |
| "grad_norm": 0.5326577425003052, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0379, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 33.011583011583014, | |
| "grad_norm": 0.3791945278644562, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.036, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 33.05019305019305, | |
| "grad_norm": 0.4695245325565338, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0379, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 33.08880308880309, | |
| "grad_norm": 0.5764884948730469, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0662, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 33.12741312741313, | |
| "grad_norm": 0.672602653503418, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0504, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 33.166023166023166, | |
| "grad_norm": 1.0906554460525513, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0463, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 33.204633204633204, | |
| "grad_norm": 0.4323593080043793, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0279, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 33.24324324324324, | |
| "grad_norm": 0.5862171053886414, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0281, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 33.28185328185328, | |
| "grad_norm": 0.6559461355209351, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0371, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 33.32046332046332, | |
| "grad_norm": 0.47020477056503296, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0325, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 33.359073359073356, | |
| "grad_norm": 0.5820066928863525, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0387, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 33.397683397683394, | |
| "grad_norm": 0.5221219658851624, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0512, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 33.43629343629344, | |
| "grad_norm": 0.7113184928894043, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0363, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 33.47490347490348, | |
| "grad_norm": 0.22089901566505432, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0282, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 33.513513513513516, | |
| "grad_norm": 0.38262519240379333, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.022, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 33.552123552123554, | |
| "grad_norm": 0.5200034379959106, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0291, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 33.59073359073359, | |
| "grad_norm": 0.20605699717998505, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0279, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 33.62934362934363, | |
| "grad_norm": 0.5793619751930237, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0413, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 33.66795366795367, | |
| "grad_norm": 0.5311439633369446, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0451, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 33.706563706563706, | |
| "grad_norm": 0.298466295003891, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0257, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 33.745173745173744, | |
| "grad_norm": 0.332709401845932, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0291, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 33.78378378378378, | |
| "grad_norm": 0.34630295634269714, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0386, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 33.82239382239382, | |
| "grad_norm": 0.4650475084781647, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0313, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 33.86100386100386, | |
| "grad_norm": 0.5013532042503357, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0374, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 33.8996138996139, | |
| "grad_norm": 0.33382976055145264, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0432, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 33.938223938223935, | |
| "grad_norm": 0.2519553303718567, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0314, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 33.97683397683398, | |
| "grad_norm": 0.44277212023735046, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0254, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 34.01544401544402, | |
| "grad_norm": 0.5330662131309509, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0301, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 34.054054054054056, | |
| "grad_norm": 0.6141893863677979, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.032, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 34.092664092664094, | |
| "grad_norm": 0.5341497659683228, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0283, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 34.13127413127413, | |
| "grad_norm": 0.5615679621696472, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0347, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 34.16988416988417, | |
| "grad_norm": 0.2762456238269806, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0266, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 34.20849420849421, | |
| "grad_norm": 0.6467777490615845, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0359, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 34.24710424710425, | |
| "grad_norm": 0.6442224383354187, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0316, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 34.285714285714285, | |
| "grad_norm": 0.4205634295940399, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0342, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 34.32432432432432, | |
| "grad_norm": 0.5289274454116821, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0356, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 34.36293436293436, | |
| "grad_norm": 0.4894663691520691, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0309, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 34.4015444015444, | |
| "grad_norm": 0.45998555421829224, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.035, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 34.44015444015444, | |
| "grad_norm": 0.7644488215446472, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0457, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 34.47876447876448, | |
| "grad_norm": 0.5515457987785339, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0303, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 34.51737451737452, | |
| "grad_norm": 0.31745579838752747, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0372, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 34.55598455598456, | |
| "grad_norm": 0.4758061468601227, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0283, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 34.5945945945946, | |
| "grad_norm": 0.25310981273651123, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0199, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 34.633204633204635, | |
| "grad_norm": 0.26150646805763245, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0252, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 34.67181467181467, | |
| "grad_norm": 0.24514614045619965, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0319, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 34.71042471042471, | |
| "grad_norm": 0.6540687680244446, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0524, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 34.74903474903475, | |
| "grad_norm": 0.3403765559196472, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.037, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 34.78764478764479, | |
| "grad_norm": 0.46332094073295593, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.0238, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 34.826254826254825, | |
| "grad_norm": 0.5999321341514587, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0405, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 34.86486486486486, | |
| "grad_norm": 0.35631102323532104, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0497, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 34.9034749034749, | |
| "grad_norm": 0.33276018500328064, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0385, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 34.94208494208494, | |
| "grad_norm": 0.32796597480773926, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0305, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 34.98069498069498, | |
| "grad_norm": 0.38111504912376404, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0368, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 35.01930501930502, | |
| "grad_norm": 0.3923816680908203, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0191, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 35.05791505791506, | |
| "grad_norm": 0.2567480206489563, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.028, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 35.0965250965251, | |
| "grad_norm": 0.48664647340774536, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0361, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 35.13513513513514, | |
| "grad_norm": 0.4539981186389923, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0419, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 35.173745173745175, | |
| "grad_norm": 0.8542642593383789, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0446, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 35.21235521235521, | |
| "grad_norm": 0.5568941235542297, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0351, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 35.25096525096525, | |
| "grad_norm": 0.600436270236969, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0442, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 35.28957528957529, | |
| "grad_norm": 0.5558466911315918, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0257, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 35.32818532818533, | |
| "grad_norm": 0.6728957295417786, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0514, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 35.366795366795365, | |
| "grad_norm": 0.4663664996623993, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0261, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 35.4054054054054, | |
| "grad_norm": 0.562464714050293, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0307, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 35.44401544401544, | |
| "grad_norm": 0.4802253544330597, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.029, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 35.48262548262548, | |
| "grad_norm": 0.32845574617385864, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0217, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 35.52123552123552, | |
| "grad_norm": 0.6012297868728638, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0391, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 35.55984555984556, | |
| "grad_norm": 0.3478630781173706, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0419, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 35.5984555984556, | |
| "grad_norm": 0.29113438725471497, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.035, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 35.63706563706564, | |
| "grad_norm": 0.7811099886894226, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0357, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 35.67567567567568, | |
| "grad_norm": 0.2843995690345764, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0404, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 35.714285714285715, | |
| "grad_norm": 0.3492255210876465, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0207, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 35.75289575289575, | |
| "grad_norm": 0.23177163302898407, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0256, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 35.79150579150579, | |
| "grad_norm": 0.4552899897098541, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0285, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 35.83011583011583, | |
| "grad_norm": 0.4950767159461975, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.031, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 35.86872586872587, | |
| "grad_norm": 0.7315733432769775, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.0388, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 35.907335907335906, | |
| "grad_norm": 0.4851663112640381, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.044, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 35.945945945945944, | |
| "grad_norm": 0.4416690468788147, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0285, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 35.98455598455598, | |
| "grad_norm": 0.3902464509010315, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0523, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 36.02316602316602, | |
| "grad_norm": 0.5559689998626709, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0327, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 36.061776061776065, | |
| "grad_norm": 0.3863861560821533, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0371, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 36.1003861003861, | |
| "grad_norm": 0.4584692418575287, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0299, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 36.13899613899614, | |
| "grad_norm": 0.5071645379066467, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0293, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 36.17760617760618, | |
| "grad_norm": 0.6098319888114929, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0498, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 36.21621621621622, | |
| "grad_norm": 0.4361838698387146, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0309, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 36.254826254826256, | |
| "grad_norm": 0.5391000509262085, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.038, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 36.293436293436294, | |
| "grad_norm": 0.5742671489715576, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.045, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 36.33204633204633, | |
| "grad_norm": 0.40355831384658813, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0405, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 36.37065637065637, | |
| "grad_norm": 0.549338698387146, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0351, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 36.40926640926641, | |
| "grad_norm": 0.22841085493564606, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0275, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 36.447876447876446, | |
| "grad_norm": 0.49053749442100525, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0328, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 36.486486486486484, | |
| "grad_norm": 0.2764204442501068, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.0229, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 36.52509652509652, | |
| "grad_norm": 0.41354867815971375, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0281, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 36.56370656370656, | |
| "grad_norm": 0.2841521203517914, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0479, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 36.602316602316606, | |
| "grad_norm": 0.13798123598098755, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0241, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 36.640926640926644, | |
| "grad_norm": 0.20515076816082, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.029, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 36.67953667953668, | |
| "grad_norm": 0.3081696629524231, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0216, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 36.71814671814672, | |
| "grad_norm": 0.32960227131843567, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.025, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 36.75675675675676, | |
| "grad_norm": 0.3410300314426422, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0305, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 36.795366795366796, | |
| "grad_norm": 0.549889862537384, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.038, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 36.833976833976834, | |
| "grad_norm": 0.7164877653121948, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0465, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 36.87258687258687, | |
| "grad_norm": 0.554248571395874, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0319, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 36.91119691119691, | |
| "grad_norm": 0.4524341821670532, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.043, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 36.94980694980695, | |
| "grad_norm": 0.5518279671669006, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0536, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 36.988416988416986, | |
| "grad_norm": 0.6280217170715332, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0353, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 37.027027027027025, | |
| "grad_norm": 0.5065165162086487, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0264, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 37.06563706563706, | |
| "grad_norm": 0.40988031029701233, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.0284, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 37.1042471042471, | |
| "grad_norm": 0.5161091685295105, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.032, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 37.142857142857146, | |
| "grad_norm": 0.5621913075447083, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0375, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 37.181467181467184, | |
| "grad_norm": 0.4827399253845215, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0315, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 37.22007722007722, | |
| "grad_norm": 0.651488721370697, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0201, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 37.25868725868726, | |
| "grad_norm": 0.3131031394004822, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0245, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 37.2972972972973, | |
| "grad_norm": 0.3043268322944641, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0299, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 37.335907335907336, | |
| "grad_norm": 0.4199933409690857, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0468, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 37.374517374517374, | |
| "grad_norm": 0.28356441855430603, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0255, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 37.41312741312741, | |
| "grad_norm": 0.41857004165649414, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0297, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 37.45173745173745, | |
| "grad_norm": 0.3036278486251831, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0274, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 37.49034749034749, | |
| "grad_norm": 0.5052376389503479, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0276, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 37.52895752895753, | |
| "grad_norm": 0.4760342836380005, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.034, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 37.567567567567565, | |
| "grad_norm": 0.5873331427574158, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0449, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 37.6061776061776, | |
| "grad_norm": 0.5967539548873901, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0298, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 37.64478764478765, | |
| "grad_norm": 0.33010393381118774, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0337, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 37.683397683397686, | |
| "grad_norm": 0.4182218909263611, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.035, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 37.722007722007724, | |
| "grad_norm": 0.42870447039604187, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.036, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 37.76061776061776, | |
| "grad_norm": 0.5004497170448303, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0332, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 37.7992277992278, | |
| "grad_norm": 0.5800615549087524, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0353, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 37.83783783783784, | |
| "grad_norm": 0.3894782066345215, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0274, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 37.87644787644788, | |
| "grad_norm": 0.4974846839904785, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.036, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 37.915057915057915, | |
| "grad_norm": 0.23191875219345093, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0337, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 37.95366795366795, | |
| "grad_norm": 0.4186077415943146, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0352, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 37.99227799227799, | |
| "grad_norm": 0.4193514585494995, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.043, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 38.03088803088803, | |
| "grad_norm": 0.4409830868244171, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.0399, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 38.06949806949807, | |
| "grad_norm": 0.5391518473625183, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0236, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 38.108108108108105, | |
| "grad_norm": 0.4023591876029968, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0281, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 38.14671814671814, | |
| "grad_norm": 0.39196234941482544, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0408, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 38.18532818532819, | |
| "grad_norm": 0.4986127018928528, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.027, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 38.22393822393823, | |
| "grad_norm": 0.6418594717979431, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0354, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 38.262548262548265, | |
| "grad_norm": 0.324028879404068, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0377, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 38.3011583011583, | |
| "grad_norm": 0.30436280369758606, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0422, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 38.33976833976834, | |
| "grad_norm": 0.5513790249824524, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0288, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 38.37837837837838, | |
| "grad_norm": 0.3686314821243286, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0329, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 38.41698841698842, | |
| "grad_norm": 0.33329352736473083, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.0195, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 38.455598455598455, | |
| "grad_norm": 0.4355686604976654, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0265, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 38.49420849420849, | |
| "grad_norm": 0.3596385419368744, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0319, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 38.53281853281853, | |
| "grad_norm": 0.4059530198574066, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.0335, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 38.57142857142857, | |
| "grad_norm": 0.6819077730178833, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0307, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 38.61003861003861, | |
| "grad_norm": 0.4189014136791229, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.0401, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 38.61003861003861, | |
| "step": 10000, | |
| "total_flos": 0.0, | |
| "train_loss": 0.08432228177487851, | |
| "train_runtime": 8918.6677, | |
| "train_samples_per_second": 17.94, | |
| "train_steps_per_second": 1.121 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 39, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
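
The state above is the raw artifact written by the trainer at step 10000; note the learning rate decaying from ~8.9e-7 at step 9430 down to ~2.7e-12 at the final step, with training loss settling around 0.02–0.05. As a minimal sketch of how such a file can be inspected offline, the snippet below loads the log and plots the loss curve and learning-rate schedule. It assumes the JSON is saved under the Hugging Face default name `trainer_state.json` in the working directory; adjust the path to wherever your checkpoint lives, and note that `matplotlib` is an assumed, not required, dependency.

```python
# Minimal sketch (not part of the checkpoint itself): load a Trainer state
# file and plot training loss and learning rate over steps.
import json

import matplotlib.pyplot as plt

# Assumed path: the Hugging Face Trainer default filename.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step log entries; the last record in log_history is a run
# summary (train_runtime, train_loss, ...) without "loss"/"learning_rate".
logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in logs]
loss = [e["loss"] for e in logs]
lr = [e["learning_rate"] for e in logs]

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
ax1.plot(steps, loss)
ax1.set_xlabel("step")
ax1.set_ylabel("training loss")
ax2.plot(steps, lr)
ax2.set_xlabel("step")
ax2.set_ylabel("learning rate")
ax2.set_yscale("log")  # the schedule spans roughly 1e-5 down to ~1e-12
fig.tight_layout()
plt.show()
```

The summary-record filter matters: the final `log_history` entry carries only aggregate fields (`train_loss`, `train_runtime`, `train_samples_per_second`), so indexing it for `"loss"` would raise a `KeyError`.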