| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 1.0, | |
| "eval_steps": 500, | |
| "global_step": 801, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "grad_norm": 4.050421697822149, | |
| "learning_rate": 8.000000000000001e-07, | |
| "loss": 1.1544, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "grad_norm": 3.4522491247185525, | |
| "learning_rate": 1.6000000000000001e-06, | |
| "loss": 1.1683, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "grad_norm": 4.209953771699016, | |
| "learning_rate": 2.4000000000000003e-06, | |
| "loss": 1.1475, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "grad_norm": 3.2116476352040837, | |
| "learning_rate": 3.2000000000000003e-06, | |
| "loss": 1.1549, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 3.0260957864359623, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 1.1239, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 3.1512192994944326, | |
| "learning_rate": 4.800000000000001e-06, | |
| "loss": 1.1077, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 1.8065571184144953, | |
| "learning_rate": 5.600000000000001e-06, | |
| "loss": 1.0738, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 1.8266262708390884, | |
| "learning_rate": 6.4000000000000006e-06, | |
| "loss": 1.0561, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 2.9759219949909417, | |
| "learning_rate": 7.2000000000000005e-06, | |
| "loss": 1.0237, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 3.052405545359168, | |
| "learning_rate": 8.000000000000001e-06, | |
| "loss": 0.9659, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 2.0087710253035542, | |
| "learning_rate": 8.8e-06, | |
| "loss": 1.044, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 2.6850246498093417, | |
| "learning_rate": 9.600000000000001e-06, | |
| "loss": 0.9758, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 2.1615561768753055, | |
| "learning_rate": 1.04e-05, | |
| "loss": 0.9934, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 1.7564042577770114, | |
| "learning_rate": 1.1200000000000001e-05, | |
| "loss": 1.0311, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 1.3272069105236235, | |
| "learning_rate": 1.2e-05, | |
| "loss": 0.9553, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 1.688411223891557, | |
| "learning_rate": 1.2800000000000001e-05, | |
| "loss": 0.9442, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 1.528134950351535, | |
| "learning_rate": 1.3600000000000002e-05, | |
| "loss": 0.9862, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 1.5328973752393094, | |
| "learning_rate": 1.4400000000000001e-05, | |
| "loss": 0.9502, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 1.2847554516079198, | |
| "learning_rate": 1.5200000000000002e-05, | |
| "loss": 0.9205, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 1.4221662366255885, | |
| "learning_rate": 1.6000000000000003e-05, | |
| "loss": 0.9591, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 1.255367628510996, | |
| "learning_rate": 1.6800000000000002e-05, | |
| "loss": 0.9001, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 1.148047804296119, | |
| "learning_rate": 1.76e-05, | |
| "loss": 0.9824, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 0.9917836271027393, | |
| "learning_rate": 1.8400000000000003e-05, | |
| "loss": 0.8932, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 1.0823142111067903, | |
| "learning_rate": 1.9200000000000003e-05, | |
| "loss": 0.9214, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 1.0569825941940303, | |
| "learning_rate": 2e-05, | |
| "loss": 0.8773, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 1.1512649834469049, | |
| "learning_rate": 1.999991805061211e-05, | |
| "loss": 0.8734, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 0.877680370813094, | |
| "learning_rate": 1.9999672203791564e-05, | |
| "loss": 0.8979, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 0.9089963370162624, | |
| "learning_rate": 1.9999262463567772e-05, | |
| "loss": 0.9686, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 0.9454114965461798, | |
| "learning_rate": 1.9998688836656322e-05, | |
| "loss": 0.8522, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 1.0221925378438834, | |
| "learning_rate": 1.999795133245889e-05, | |
| "loss": 0.8927, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 0.7560039515035071, | |
| "learning_rate": 1.999704996306308e-05, | |
| "loss": 0.8841, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 0.8177802771211006, | |
| "learning_rate": 1.999598474324223e-05, | |
| "loss": 0.8383, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 1.0409463245472501, | |
| "learning_rate": 1.9994755690455154e-05, | |
| "loss": 0.9304, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 0.8314907519172736, | |
| "learning_rate": 1.9993362824845878e-05, | |
| "loss": 0.8596, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 0.7233397253368891, | |
| "learning_rate": 1.9991806169243302e-05, | |
| "loss": 0.8474, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 0.7250793663189042, | |
| "learning_rate": 1.999008574916082e-05, | |
| "loss": 0.8506, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.8091084445628496, | |
| "learning_rate": 1.998820159279591e-05, | |
| "loss": 0.8244, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.7578973831164427, | |
| "learning_rate": 1.9986153731029657e-05, | |
| "loss": 0.8457, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.6638759236574057, | |
| "learning_rate": 1.9983942197426272e-05, | |
| "loss": 0.8406, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.6896497026906081, | |
| "learning_rate": 1.9981567028232514e-05, | |
| "loss": 0.9, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.6414383119060907, | |
| "learning_rate": 1.997902826237712e-05, | |
| "loss": 0.8667, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.728045432246043, | |
| "learning_rate": 1.9976325941470147e-05, | |
| "loss": 0.8454, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.7288618347232488, | |
| "learning_rate": 1.9973460109802306e-05, | |
| "loss": 0.8109, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.6374436993894802, | |
| "learning_rate": 1.997043081434423e-05, | |
| "loss": 0.8208, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.6891585415522739, | |
| "learning_rate": 1.9967238104745695e-05, | |
| "loss": 0.8272, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.6869606836622446, | |
| "learning_rate": 1.9963882033334827e-05, | |
| "loss": 0.869, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.6909918132711272, | |
| "learning_rate": 1.996036265511722e-05, | |
| "loss": 0.8229, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.5939328229579649, | |
| "learning_rate": 1.9956680027775054e-05, | |
| "loss": 0.8408, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.659777802804195, | |
| "learning_rate": 1.995283421166614e-05, | |
| "loss": 0.795, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.630173617841524, | |
| "learning_rate": 1.9948825269822934e-05, | |
| "loss": 0.8446, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.8717985826540854, | |
| "learning_rate": 1.9944653267951507e-05, | |
| "loss": 0.7986, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.59493509124484, | |
| "learning_rate": 1.994031827443045e-05, | |
| "loss": 0.8134, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.6740076785755504, | |
| "learning_rate": 1.993582036030978e-05, | |
| "loss": 0.8064, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.6410642565408483, | |
| "learning_rate": 1.9931159599309757e-05, | |
| "loss": 0.8323, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.6720779223299492, | |
| "learning_rate": 1.9926336067819686e-05, | |
| "loss": 0.8256, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.730476801871339, | |
| "learning_rate": 1.9921349844896655e-05, | |
| "loss": 0.789, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.595837869201607, | |
| "learning_rate": 1.9916201012264255e-05, | |
| "loss": 0.8144, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.8127953128220716, | |
| "learning_rate": 1.991088965431121e-05, | |
| "loss": 0.8796, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.6175756933005155, | |
| "learning_rate": 1.9905415858090036e-05, | |
| "loss": 0.8175, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.6700936304713285, | |
| "learning_rate": 1.9899779713315577e-05, | |
| "loss": 0.8098, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.6040877421514723, | |
| "learning_rate": 1.9893981312363563e-05, | |
| "loss": 0.7949, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.720386605678337, | |
| "learning_rate": 1.9888020750269067e-05, | |
| "loss": 0.774, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.609758482071129, | |
| "learning_rate": 1.988189812472498e-05, | |
| "loss": 0.8066, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.7399468596495679, | |
| "learning_rate": 1.987561353608038e-05, | |
| "loss": 0.8534, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.6288843677336958, | |
| "learning_rate": 1.9869167087338908e-05, | |
| "loss": 0.8093, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.6832779153361084, | |
| "learning_rate": 1.9862558884157067e-05, | |
| "loss": 0.7917, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.7330332971897311, | |
| "learning_rate": 1.9855789034842504e-05, | |
| "loss": 0.7978, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.6972813519695797, | |
| "learning_rate": 1.9848857650352213e-05, | |
| "loss": 0.7741, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.6358450195025426, | |
| "learning_rate": 1.9841764844290744e-05, | |
| "loss": 0.8011, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.6896477747547599, | |
| "learning_rate": 1.9834510732908314e-05, | |
| "loss": 0.7917, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.6710283082370337, | |
| "learning_rate": 1.9827095435098926e-05, | |
| "loss": 0.7652, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.7141801906145083, | |
| "learning_rate": 1.9819519072398397e-05, | |
| "loss": 0.8508, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.6639436013137763, | |
| "learning_rate": 1.9811781768982392e-05, | |
| "loss": 0.7821, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.5477095114515034, | |
| "learning_rate": 1.980388365166436e-05, | |
| "loss": 0.7874, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.6945810654587365, | |
| "learning_rate": 1.9795824849893483e-05, | |
| "loss": 0.7965, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.7217797206059553, | |
| "learning_rate": 1.9787605495752528e-05, | |
| "loss": 0.7692, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.6684061578707455, | |
| "learning_rate": 1.977922572395571e-05, | |
| "loss": 0.7969, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.6707150017766241, | |
| "learning_rate": 1.977068567184646e-05, | |
| "loss": 0.8307, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.7717208943525561, | |
| "learning_rate": 1.976198547939518e-05, | |
| "loss": 0.757, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.58820913965947, | |
| "learning_rate": 1.975312528919697e-05, | |
| "loss": 0.8038, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.7146796204632543, | |
| "learning_rate": 1.9744105246469264e-05, | |
| "loss": 0.797, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.6743555560911234, | |
| "learning_rate": 1.9734925499049446e-05, | |
| "loss": 0.7683, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.6195095279399262, | |
| "learning_rate": 1.972558619739246e-05, | |
| "loss": 0.851, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.6947888571089916, | |
| "learning_rate": 1.9716087494568318e-05, | |
| "loss": 0.7964, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.7498924442216305, | |
| "learning_rate": 1.9706429546259592e-05, | |
| "loss": 0.7475, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.6477658851669421, | |
| "learning_rate": 1.9696612510758878e-05, | |
| "loss": 0.7832, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.7473123140056833, | |
| "learning_rate": 1.9686636548966177e-05, | |
| "loss": 0.7764, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.749162547762247, | |
| "learning_rate": 1.9676501824386295e-05, | |
| "loss": 0.7457, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.6956559440461865, | |
| "learning_rate": 1.9666208503126115e-05, | |
| "loss": 0.7895, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.7085425092435499, | |
| "learning_rate": 1.9655756753891916e-05, | |
| "loss": 0.8317, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.8582717663487707, | |
| "learning_rate": 1.964514674798659e-05, | |
| "loss": 0.7894, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.6621588825705549, | |
| "learning_rate": 1.9634378659306834e-05, | |
| "loss": 0.7851, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.7797275848414665, | |
| "learning_rate": 1.9623452664340305e-05, | |
| "loss": 0.7698, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.8540541196734044, | |
| "learning_rate": 1.9612368942162717e-05, | |
| "loss": 0.7454, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.6855920721699429, | |
| "learning_rate": 1.960112767443493e-05, | |
| "loss": 0.7717, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.6772325613360068, | |
| "learning_rate": 1.9589729045399935e-05, | |
| "loss": 0.7856, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.71783357181609, | |
| "learning_rate": 1.957817324187987e-05, | |
| "loss": 0.853, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.651081876287226, | |
| "learning_rate": 1.9566460453272945e-05, | |
| "loss": 0.7758, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.7224314306320238, | |
| "learning_rate": 1.955459087155033e-05, | |
| "loss": 0.7698, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.7417366304140689, | |
| "learning_rate": 1.954256469125301e-05, | |
| "loss": 0.7565, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.705860767192474, | |
| "learning_rate": 1.953038210948861e-05, | |
| "loss": 0.7521, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.6853121441171455, | |
| "learning_rate": 1.9518043325928157e-05, | |
| "loss": 0.7623, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.8531911402501465, | |
| "learning_rate": 1.9505548542802805e-05, | |
| "loss": 0.7695, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.7097227658031378, | |
| "learning_rate": 1.9492897964900512e-05, | |
| "loss": 0.7686, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.7305229333984748, | |
| "learning_rate": 1.9480091799562706e-05, | |
| "loss": 0.7594, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.6769047157703583, | |
| "learning_rate": 1.9467130256680867e-05, | |
| "loss": 0.7823, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.9227853066106919, | |
| "learning_rate": 1.9454013548693103e-05, | |
| "loss": 0.7604, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.6788203795678404, | |
| "learning_rate": 1.9440741890580643e-05, | |
| "loss": 0.7733, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.8101074316401654, | |
| "learning_rate": 1.9427315499864345e-05, | |
| "loss": 0.8122, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.7465255356027457, | |
| "learning_rate": 1.9413734596601104e-05, | |
| "loss": 0.7712, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.727253005664772, | |
| "learning_rate": 1.9399999403380266e-05, | |
| "loss": 0.769, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.7445474864707267, | |
| "learning_rate": 1.9386110145319962e-05, | |
| "loss": 0.7253, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.7711000445972649, | |
| "learning_rate": 1.937206705006344e-05, | |
| "loss": 0.7745, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.7347966122270397, | |
| "learning_rate": 1.93578703477753e-05, | |
| "loss": 0.747, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.8007034594663927, | |
| "learning_rate": 1.9343520271137764e-05, | |
| "loss": 0.8104, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.6690335939490886, | |
| "learning_rate": 1.932901705534683e-05, | |
| "loss": 0.7703, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.6876843319988346, | |
| "learning_rate": 1.9314360938108427e-05, | |
| "loss": 0.7574, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.8026015331475426, | |
| "learning_rate": 1.929955215963452e-05, | |
| "loss": 0.7153, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.6083840676926519, | |
| "learning_rate": 1.928459096263918e-05, | |
| "loss": 0.7728, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.7793114985568895, | |
| "learning_rate": 1.926947759233459e-05, | |
| "loss": 0.7554, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.6471718444852702, | |
| "learning_rate": 1.9254212296427043e-05, | |
| "loss": 0.7561, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 1.5288366573811976, | |
| "learning_rate": 1.9238795325112867e-05, | |
| "loss": 0.8227, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.7540586194531294, | |
| "learning_rate": 1.922322693107434e-05, | |
| "loss": 0.7607, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.8504085552569656, | |
| "learning_rate": 1.920750736947553e-05, | |
| "loss": 0.7451, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.7553248522606649, | |
| "learning_rate": 1.9191636897958123e-05, | |
| "loss": 0.7472, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.761337788781711, | |
| "learning_rate": 1.9175615776637212e-05, | |
| "loss": 0.7485, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.7221698572987841, | |
| "learning_rate": 1.9159444268097012e-05, | |
| "loss": 0.7387, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.7417521945174339, | |
| "learning_rate": 1.9143122637386567e-05, | |
| "loss": 0.8169, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.7985821809817109, | |
| "learning_rate": 1.9126651152015404e-05, | |
| "loss": 0.7292, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.7182359164981328, | |
| "learning_rate": 1.9110030081949157e-05, | |
| "loss": 0.762, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.6823103865926027, | |
| "learning_rate": 1.9093259699605125e-05, | |
| "loss": 0.7707, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.7037103987162874, | |
| "learning_rate": 1.907634027984782e-05, | |
| "loss": 0.7543, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.6541431669524109, | |
| "learning_rate": 1.905927209998447e-05, | |
| "loss": 0.7865, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.6113930545486561, | |
| "learning_rate": 1.9042055439760447e-05, | |
| "loss": 0.7503, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.6734766407541498, | |
| "learning_rate": 1.90246905813547e-05, | |
| "loss": 0.7409, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.7038106427408382, | |
| "learning_rate": 1.900717780937514e-05, | |
| "loss": 0.7462, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.7009766460600779, | |
| "learning_rate": 1.8989517410853956e-05, | |
| "loss": 0.7209, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.6698192317539081, | |
| "learning_rate": 1.897170967524291e-05, | |
| "loss": 0.7489, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.7761639823087021, | |
| "learning_rate": 1.8953754894408617e-05, | |
| "loss": 0.7528, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.6938555197534098, | |
| "learning_rate": 1.893565336262773e-05, | |
| "loss": 0.7982, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.780829308967089, | |
| "learning_rate": 1.8917405376582144e-05, | |
| "loss": 0.7441, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.6775910638531962, | |
| "learning_rate": 1.8899011235354118e-05, | |
| "loss": 0.7348, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.7887079664457872, | |
| "learning_rate": 1.8880471240421365e-05, | |
| "loss": 0.7391, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.7224368317100063, | |
| "learning_rate": 1.8861785695652142e-05, | |
| "loss": 0.7357, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.7193648740337865, | |
| "learning_rate": 1.8842954907300236e-05, | |
| "loss": 0.7436, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.6156201185600855, | |
| "learning_rate": 1.8823979183999965e-05, | |
| "loss": 0.7347, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.6564009178371437, | |
| "learning_rate": 1.880485883676111e-05, | |
| "loss": 0.7905, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.6295945305014009, | |
| "learning_rate": 1.878559417896382e-05, | |
| "loss": 0.7354, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.7473996781600849, | |
| "learning_rate": 1.876618552635348e-05, | |
| "loss": 0.7416, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.6864395075209073, | |
| "learning_rate": 1.8746633197035525e-05, | |
| "loss": 0.7144, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.7540407091733655, | |
| "learning_rate": 1.8726937511470247e-05, | |
| "loss": 0.7307, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.8221923969905752, | |
| "learning_rate": 1.870709879246752e-05, | |
| "loss": 0.728, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.647639291571937, | |
| "learning_rate": 1.8687117365181514e-05, | |
| "loss": 0.7643, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.876594778809403, | |
| "learning_rate": 1.8666993557105377e-05, | |
| "loss": 0.7066, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.6952076031723992, | |
| "learning_rate": 1.8646727698065865e-05, | |
| "loss": 0.7374, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.800282441200813, | |
| "learning_rate": 1.8626320120217922e-05, | |
| "loss": 0.7442, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.7474869566385066, | |
| "learning_rate": 1.8605771158039253e-05, | |
| "loss": 0.7235, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.722811246894062, | |
| "learning_rate": 1.858508114832483e-05, | |
| "loss": 0.7346, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.824122704451794, | |
| "learning_rate": 1.8564250430181387e-05, | |
| "loss": 0.7828, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.7590804525040064, | |
| "learning_rate": 1.8543279345021834e-05, | |
| "loss": 0.7317, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.6772804496327979, | |
| "learning_rate": 1.8522168236559693e-05, | |
| "loss": 0.7236, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.7301531426117454, | |
| "learning_rate": 1.850091745080345e-05, | |
| "loss": 0.7086, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.6810509924599181, | |
| "learning_rate": 1.847952733605088e-05, | |
| "loss": 0.7349, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.7569415947786665, | |
| "learning_rate": 1.8457998242883346e-05, | |
| "loss": 0.7318, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.7064012926575857, | |
| "learning_rate": 1.8436330524160048e-05, | |
| "loss": 0.7957, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.662694617080743, | |
| "learning_rate": 1.8414524535012244e-05, | |
| "loss": 0.709, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.8954259419770013, | |
| "learning_rate": 1.8392580632837423e-05, | |
| "loss": 0.7491, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.7024460813570171, | |
| "learning_rate": 1.8370499177293463e-05, | |
| "loss": 0.7115, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.8415609399672339, | |
| "learning_rate": 1.8348280530292712e-05, | |
| "loss": 0.7359, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.7254373543665715, | |
| "learning_rate": 1.8325925055996076e-05, | |
| "loss": 0.7299, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.7254597122440483, | |
| "learning_rate": 1.8303433120807043e-05, | |
| "loss": 0.733, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.7462576744539569, | |
| "learning_rate": 1.8280805093365674e-05, | |
| "loss": 0.7746, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.6748201934936428, | |
| "learning_rate": 1.8258041344542567e-05, | |
| "loss": 0.742, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.6451251599438318, | |
| "learning_rate": 1.8235142247432784e-05, | |
| "loss": 0.7047, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.6799131321350648, | |
| "learning_rate": 1.8212108177349722e-05, | |
| "loss": 0.7193, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.7972919554744684, | |
| "learning_rate": 1.8188939511818965e-05, | |
| "loss": 0.7381, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.7054502019474785, | |
| "learning_rate": 1.816563663057211e-05, | |
| "loss": 0.7109, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.5914644195974801, | |
| "learning_rate": 1.814219991554053e-05, | |
| "loss": 0.7697, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.7335647840733366, | |
| "learning_rate": 1.8118629750849106e-05, | |
| "loss": 0.7285, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.7183256424871337, | |
| "learning_rate": 1.8094926522809958e-05, | |
| "loss": 0.7131, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.6943282845084143, | |
| "learning_rate": 1.8071090619916095e-05, | |
| "loss": 0.7351, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.7377257711447108, | |
| "learning_rate": 1.804712243283504e-05, | |
| "loss": 0.7255, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.724125878072718, | |
| "learning_rate": 1.802302235440245e-05, | |
| "loss": 0.7866, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.7493613491039653, | |
| "learning_rate": 1.799879077961566e-05, | |
| "loss": 0.7365, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.8908469645001831, | |
| "learning_rate": 1.797442810562721e-05, | |
| "loss": 0.71, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.7053338175952263, | |
| "learning_rate": 1.7949934731738348e-05, | |
| "loss": 0.734, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.9612976311357089, | |
| "learning_rate": 1.7925311059392472e-05, | |
| "loss": 0.7079, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.6836899111297493, | |
| "learning_rate": 1.790055749216856e-05, | |
| "loss": 0.7196, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.9773072491586141, | |
| "learning_rate": 1.7875674435774546e-05, | |
| "loss": 0.706, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.6595513679823545, | |
| "learning_rate": 1.7850662298040676e-05, | |
| "loss": 0.7666, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.8016137835161344, | |
| "learning_rate": 1.7825521488912833e-05, | |
| "loss": 0.7306, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.6859313585230287, | |
| "learning_rate": 1.7800252420445788e-05, | |
| "loss": 0.7209, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.6889799204445292, | |
| "learning_rate": 1.7774855506796497e-05, | |
| "loss": 0.7222, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.7147323195488424, | |
| "learning_rate": 1.774933116421725e-05, | |
| "loss": 0.7205, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.7572184994622753, | |
| "learning_rate": 1.7723679811048904e-05, | |
| "loss": 0.7313, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.7495623875878256, | |
| "learning_rate": 1.7697901867713997e-05, | |
| "loss": 0.696, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.8587732175016841, | |
| "learning_rate": 1.767199775670986e-05, | |
| "loss": 0.79, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.8226627175371956, | |
| "learning_rate": 1.764596790260171e-05, | |
| "loss": 0.7136, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.7742898660936148, | |
| "learning_rate": 1.7619812732015664e-05, | |
| "loss": 0.7106, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.7199718228477165, | |
| "learning_rate": 1.7593532673631765e-05, | |
| "loss": 0.7081, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.8389904161132045, | |
| "learning_rate": 1.7567128158176955e-05, | |
| "loss": 0.6976, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.6885626524548528, | |
| "learning_rate": 1.754059961841801e-05, | |
| "loss": 0.7166, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.62855922965186, | |
| "learning_rate": 1.7513947489154443e-05, | |
| "loss": 0.7317, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.7645461252822144, | |
| "learning_rate": 1.7487172207211395e-05, | |
| "loss": 0.6871, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.7318185970621935, | |
| "learning_rate": 1.7460274211432463e-05, | |
| "loss": 0.7043, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.6593101915077957, | |
| "learning_rate": 1.7433253942672497e-05, | |
| "loss": 0.7422, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.8233033841823448, | |
| "learning_rate": 1.74061118437904e-05, | |
| "loss": 0.6828, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.6191230026164649, | |
| "learning_rate": 1.7378848359641846e-05, | |
| "loss": 0.7255, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.7333042560107498, | |
| "learning_rate": 1.7351463937072008e-05, | |
| "loss": 0.7715, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.7489281525910614, | |
| "learning_rate": 1.732395902490821e-05, | |
| "loss": 0.7094, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.7997292555576989, | |
| "learning_rate": 1.7296334073952606e-05, | |
| "loss": 0.7097, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.7939984871333575, | |
| "learning_rate": 1.726858953697475e-05, | |
| "loss": 0.7051, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.8980147872239701, | |
| "learning_rate": 1.7240725868704218e-05, | |
| "loss": 0.7124, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.9006092313288788, | |
| "learning_rate": 1.721274352582311e-05, | |
| "loss": 0.7164, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.7453391863645898, | |
| "learning_rate": 1.718464296695861e-05, | |
| "loss": 0.7834, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.7417223805399968, | |
| "learning_rate": 1.7156424652675433e-05, | |
| "loss": 0.7081, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.7534810498306946, | |
| "learning_rate": 1.7128089045468294e-05, | |
| "loss": 0.6968, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.7546517064189007, | |
| "learning_rate": 1.709963660975433e-05, | |
| "loss": 0.7126, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.760563289758312, | |
| "learning_rate": 1.7071067811865477e-05, | |
| "loss": 0.7091, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.7581693602018251, | |
| "learning_rate": 1.7042383120040837e-05, | |
| "loss": 0.7308, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.6977006533134363, | |
| "learning_rate": 1.7013583004418994e-05, | |
| "loss": 0.7031, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.8223212695782158, | |
| "learning_rate": 1.698466793703032e-05, | |
| "loss": 0.759, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.9150130382964546, | |
| "learning_rate": 1.695563839178923e-05, | |
| "loss": 0.7349, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.7330338134515832, | |
| "learning_rate": 1.6926494844486412e-05, | |
| "loss": 0.6744, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.7739183269330984, | |
| "learning_rate": 1.6897237772781046e-05, | |
| "loss": 0.7137, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.656468237101363, | |
| "learning_rate": 1.6867867656192946e-05, | |
| "loss": 0.7042, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.7454467369978334, | |
| "learning_rate": 1.6838384976094738e-05, | |
| "loss": 0.7106, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.8209720393413101, | |
| "learning_rate": 1.6808790215703933e-05, | |
| "loss": 0.7491, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.6902446956425804, | |
| "learning_rate": 1.6779083860075032e-05, | |
| "loss": 0.7054, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.7880210894086325, | |
| "learning_rate": 1.674926639609157e-05, | |
| "loss": 0.71, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.6652067545160919, | |
| "learning_rate": 1.6719338312458123e-05, | |
| "loss": 0.7269, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.6306135539192167, | |
| "learning_rate": 1.6689300099692332e-05, | |
| "loss": 0.7134, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.8340897323013752, | |
| "learning_rate": 1.665915225011681e-05, | |
| "loss": 0.6879, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.6466154375134381, | |
| "learning_rate": 1.6628895257851136e-05, | |
| "loss": 0.7545, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.7618511659485634, | |
| "learning_rate": 1.65985296188037e-05, | |
| "loss": 0.6925, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.8187703179003039, | |
| "learning_rate": 1.656805583066361e-05, | |
| "loss": 0.7176, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.7502264225276143, | |
| "learning_rate": 1.6537474392892527e-05, | |
| "loss": 0.7095, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.8097969662672855, | |
| "learning_rate": 1.6506785806716464e-05, | |
| "loss": 0.697, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.6906146305800908, | |
| "learning_rate": 1.6475990575117603e-05, | |
| "loss": 0.6869, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.8223197770677472, | |
| "learning_rate": 1.644508920282601e-05, | |
| "loss": 0.753, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.7024827757147297, | |
| "learning_rate": 1.6414082196311402e-05, | |
| "loss": 0.6904, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.706997180401339, | |
| "learning_rate": 1.638297006377481e-05, | |
| "loss": 0.7003, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.761095407493811, | |
| "learning_rate": 1.6351753315140285e-05, | |
| "loss": 0.698, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.7422167616921054, | |
| "learning_rate": 1.6320432462046516e-05, | |
| "loss": 0.7186, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.7695788913922835, | |
| "learning_rate": 1.6289008017838447e-05, | |
| "loss": 0.708, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.6786701728712313, | |
| "learning_rate": 1.6257480497558873e-05, | |
| "loss": 0.6793, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.7805136584093828, | |
| "learning_rate": 1.622585041793999e-05, | |
| "loss": 0.7804, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.643788127329463, | |
| "learning_rate": 1.6194118297394935e-05, | |
| "loss": 0.6932, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.7779318756761856, | |
| "learning_rate": 1.6162284656009276e-05, | |
| "loss": 0.7118, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.7827061012286722, | |
| "learning_rate": 1.6130350015532498e-05, | |
| "loss": 0.7162, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.6252425539450749, | |
| "learning_rate": 1.6098314899369446e-05, | |
| "loss": 0.7002, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.6950977115396647, | |
| "learning_rate": 1.6066179832571762e-05, | |
| "loss": 0.6779, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.7564723629412936, | |
| "learning_rate": 1.603394534182925e-05, | |
| "loss": 0.7426, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.6662035677757666, | |
| "learning_rate": 1.6001611955461265e-05, | |
| "loss": 0.6738, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.7443885213732637, | |
| "learning_rate": 1.5969180203408052e-05, | |
| "loss": 0.6988, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.725792857216776, | |
| "learning_rate": 1.5936650617222063e-05, | |
| "loss": 0.7183, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.694790268487878, | |
| "learning_rate": 1.5904023730059227e-05, | |
| "loss": 0.6883, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.8056225558740404, | |
| "learning_rate": 1.5871300076670236e-05, | |
| "loss": 0.7136, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.686737751223464, | |
| "learning_rate": 1.5838480193391753e-05, | |
| "loss": 0.7702, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.7293053389782644, | |
| "learning_rate": 1.580556461813766e-05, | |
| "loss": 0.7136, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.7681622413122055, | |
| "learning_rate": 1.5772553890390196e-05, | |
| "loss": 0.6989, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.6996249418785109, | |
| "learning_rate": 1.573944855119115e-05, | |
| "loss": 0.7092, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.7656196254260119, | |
| "learning_rate": 1.5706249143132982e-05, | |
| "loss": 0.6912, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.7935276488984404, | |
| "learning_rate": 1.5672956210349923e-05, | |
| "loss": 0.6909, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.7590965741522488, | |
| "learning_rate": 1.5639570298509067e-05, | |
| "loss": 0.7734, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.7369044883176309, | |
| "learning_rate": 1.560609195480142e-05, | |
| "loss": 0.6818, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.7879759693028896, | |
| "learning_rate": 1.5572521727932937e-05, | |
| "loss": 0.6902, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.6997745371270365, | |
| "learning_rate": 1.5538860168115527e-05, | |
| "loss": 0.6972, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.8777129282885041, | |
| "learning_rate": 1.5505107827058038e-05, | |
| "loss": 0.6896, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.830245073602301, | |
| "learning_rate": 1.5471265257957202e-05, | |
| "loss": 0.6856, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.6849413326975365, | |
| "learning_rate": 1.5437333015488586e-05, | |
| "loss": 0.7025, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.863043013790657, | |
| "learning_rate": 1.5403311655797494e-05, | |
| "loss": 0.7513, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.8187297413958853, | |
| "learning_rate": 1.536920173648984e-05, | |
| "loss": 0.7008, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.7844393722686187, | |
| "learning_rate": 1.5335003816623027e-05, | |
| "loss": 0.6828, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.6358387080819407, | |
| "learning_rate": 1.530071845669678e-05, | |
| "loss": 0.7122, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.735870161613146, | |
| "learning_rate": 1.526634621864395e-05, | |
| "loss": 0.6833, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.7720874652707324, | |
| "learning_rate": 1.52318876658213e-05, | |
| "loss": 0.7016, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.674584610026373, | |
| "learning_rate": 1.5197343363000308e-05, | |
| "loss": 0.717, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.7038084515462764, | |
| "learning_rate": 1.516271387635786e-05, | |
| "loss": 0.6707, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.6882612272149368, | |
| "learning_rate": 1.5127999773467001e-05, | |
| "loss": 0.7012, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.7024647865541672, | |
| "learning_rate": 1.5093201623287631e-05, | |
| "loss": 0.7181, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.6727023267211898, | |
| "learning_rate": 1.5058319996157172e-05, | |
| "loss": 0.6602, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.7465630000596386, | |
| "learning_rate": 1.5023355463781221e-05, | |
| "loss": 0.7163, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.6984225010098803, | |
| "learning_rate": 1.4988308599224182e-05, | |
| "loss": 0.7482, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.8619915515737648, | |
| "learning_rate": 1.4953179976899878e-05, | |
| "loss": 0.6766, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.7348774289117588, | |
| "learning_rate": 1.4917970172562122e-05, | |
| "loss": 0.6944, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.8366099612874214, | |
| "learning_rate": 1.4882679763295307e-05, | |
| "loss": 0.6723, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.5742961622865171, | |
| "learning_rate": 1.484730932750491e-05, | |
| "loss": 0.7052, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.8026662308952525, | |
| "learning_rate": 1.4811859444908053e-05, | |
| "loss": 0.6838, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.6448865575111775, | |
| "learning_rate": 1.4776330696523964e-05, | |
| "loss": 0.7157, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.644596649858372, | |
| "learning_rate": 1.4740723664664483e-05, | |
| "loss": 0.6736, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.668387681674303, | |
| "learning_rate": 1.4705038932924502e-05, | |
| "loss": 0.7109, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.6448680461747227, | |
| "learning_rate": 1.4669277086172406e-05, | |
| "loss": 0.6836, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.7448204272963629, | |
| "learning_rate": 1.4633438710540488e-05, | |
| "loss": 0.6842, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.6434274761308751, | |
| "learning_rate": 1.4597524393415336e-05, | |
| "loss": 0.703, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.7176591670030213, | |
| "learning_rate": 1.4561534723428205e-05, | |
| "loss": 0.6742, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.6284667275951265, | |
| "learning_rate": 1.4525470290445392e-05, | |
| "loss": 0.7568, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.6643417344803181, | |
| "learning_rate": 1.4489331685558525e-05, | |
| "loss": 0.698, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.5893887225885674, | |
| "learning_rate": 1.4453119501074924e-05, | |
| "loss": 0.6838, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.7644763131943855, | |
| "learning_rate": 1.4416834330507857e-05, | |
| "loss": 0.69, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.6363652856489801, | |
| "learning_rate": 1.4380476768566825e-05, | |
| "loss": 0.6928, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.7138641833174041, | |
| "learning_rate": 1.434404741114782e-05, | |
| "loss": 0.6661, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.68262173814636, | |
| "learning_rate": 1.4307546855323549e-05, | |
| "loss": 0.7209, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.7231471005358049, | |
| "learning_rate": 1.4270975699333653e-05, | |
| "loss": 0.6663, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.6960566739420361, | |
| "learning_rate": 1.4234334542574906e-05, | |
| "loss": 0.6743, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.6939441970735452, | |
| "learning_rate": 1.4197623985591373e-05, | |
| "loss": 0.7015, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.7026781632309379, | |
| "learning_rate": 1.4160844630064596e-05, | |
| "loss": 0.6844, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.7622535513911972, | |
| "learning_rate": 1.4123997078803708e-05, | |
| "loss": 0.7098, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.72349415439694, | |
| "learning_rate": 1.4087081935735565e-05, | |
| "loss": 0.7212, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.6413870716335239, | |
| "learning_rate": 1.4050099805894837e-05, | |
| "loss": 0.7123, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.6941497000214715, | |
| "learning_rate": 1.4013051295414108e-05, | |
| "loss": 0.6519, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.7075235355277671, | |
| "learning_rate": 1.3975937011513931e-05, | |
| "loss": 0.7026, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.636952219522705, | |
| "learning_rate": 1.3938757562492873e-05, | |
| "loss": 0.6792, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.669973911082459, | |
| "learning_rate": 1.3901513557717554e-05, | |
| "loss": 0.6937, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.679186651508683, | |
| "learning_rate": 1.3864205607612648e-05, | |
| "loss": 0.7368, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.6881658251981118, | |
| "learning_rate": 1.3826834323650899e-05, | |
| "loss": 0.6791, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.6201517167263211, | |
| "learning_rate": 1.378940031834307e-05, | |
| "loss": 0.7006, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.697739152945046, | |
| "learning_rate": 1.3751904205227922e-05, | |
| "loss": 0.6644, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.658040151246998, | |
| "learning_rate": 1.3714346598862168e-05, | |
| "loss": 0.6703, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.6360588523416896, | |
| "learning_rate": 1.3676728114810367e-05, | |
| "loss": 0.6858, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.6931092055073744, | |
| "learning_rate": 1.3639049369634878e-05, | |
| "loss": 0.7127, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.604853285388519, | |
| "learning_rate": 1.3601310980885714e-05, | |
| "loss": 0.7177, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.6548628153153928, | |
| "learning_rate": 1.356351356709045e-05, | |
| "loss": 0.6955, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.6956475231891892, | |
| "learning_rate": 1.3525657747744073e-05, | |
| "loss": 0.6704, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.6376961636224463, | |
| "learning_rate": 1.3487744143298822e-05, | |
| "loss": 0.6851, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.6432910988957646, | |
| "learning_rate": 1.344977337515404e-05, | |
| "loss": 0.6687, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.7375306077254118, | |
| "learning_rate": 1.3411746065645961e-05, | |
| "loss": 0.6838, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.6490770364006783, | |
| "learning_rate": 1.3373662838037538e-05, | |
| "loss": 0.7153, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.7446844428783186, | |
| "learning_rate": 1.3335524316508208e-05, | |
| "loss": 0.6543, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.766041681974227, | |
| "learning_rate": 1.3297331126143667e-05, | |
| "loss": 0.681, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.6935412449990047, | |
| "learning_rate": 1.3259083892925633e-05, | |
| "loss": 0.6915, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.6254311054575801, | |
| "learning_rate": 1.3220783243721571e-05, | |
| "loss": 0.6729, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7762631783044686, | |
| "learning_rate": 1.3182429806274442e-05, | |
| "loss": 0.689, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.6881672074584007, | |
| "learning_rate": 1.3144024209192378e-05, | |
| "loss": 0.7411, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.6358753562338745, | |
| "learning_rate": 1.3105567081938423e-05, | |
| "loss": 0.6587, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7470874264026129, | |
| "learning_rate": 1.3067059054820184e-05, | |
| "loss": 0.6764, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7687355342474296, | |
| "learning_rate": 1.3028500758979507e-05, | |
| "loss": 0.6772, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.6627686750796015, | |
| "learning_rate": 1.2989892826382144e-05, | |
| "loss": 0.7011, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7090967259878612, | |
| "learning_rate": 1.2951235889807386e-05, | |
| "loss": 0.678, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7084210855766322, | |
| "learning_rate": 1.2912530582837683e-05, | |
| "loss": 0.7204, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.663549702472892, | |
| "learning_rate": 1.2873777539848284e-05, | |
| "loss": 0.6595, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.6506360572049058, | |
| "learning_rate": 1.2834977395996817e-05, | |
| "loss": 0.7012, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.8430861012936159, | |
| "learning_rate": 1.279613078721289e-05, | |
| "loss": 0.6719, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.6853971357661781, | |
| "learning_rate": 1.2757238350187669e-05, | |
| "loss": 0.6706, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.6984152332324459, | |
| "learning_rate": 1.2718300722363431e-05, | |
| "loss": 0.6806, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.6840075820553941, | |
| "learning_rate": 1.2679318541923131e-05, | |
| "loss": 0.6747, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.6083802270169807, | |
| "learning_rate": 1.2640292447779932e-05, | |
| "loss": 0.7461, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.7286888062718239, | |
| "learning_rate": 1.2601223079566745e-05, | |
| "loss": 0.6981, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.6187807297641909, | |
| "learning_rate": 1.2562111077625723e-05, | |
| "loss": 0.6639, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7224231721832342, | |
| "learning_rate": 1.25229570829978e-05, | |
| "loss": 0.6673, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7247095699954746, | |
| "learning_rate": 1.248376173741215e-05, | |
| "loss": 0.674, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.719452642263008, | |
| "learning_rate": 1.2444525683275687e-05, | |
| "loss": 0.6541, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.6325971772242212, | |
| "learning_rate": 1.2405249563662539e-05, | |
| "loss": 0.7007, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7513157088192811, | |
| "learning_rate": 1.2365934022303491e-05, | |
| "loss": 0.6848, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.6708081453198337, | |
| "learning_rate": 1.2326579703575464e-05, | |
| "loss": 0.6696, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.6520575494208382, | |
| "learning_rate": 1.2287187252490914e-05, | |
| "loss": 0.6833, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7203706269009875, | |
| "learning_rate": 1.2247757314687296e-05, | |
| "loss": 0.6715, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.6216048079860456, | |
| "learning_rate": 1.2208290536416466e-05, | |
| "loss": 0.6673, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.6383333297172197, | |
| "learning_rate": 1.2168787564534078e-05, | |
| "loss": 0.7199, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.6272184428646471, | |
| "learning_rate": 1.212924904648902e-05, | |
| "loss": 0.6811, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.6231867328925055, | |
| "learning_rate": 1.2089675630312755e-05, | |
| "loss": 0.6871, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7010909078929145, | |
| "learning_rate": 1.2050067964608725e-05, | |
| "loss": 0.6613, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.6299035968697418, | |
| "learning_rate": 1.2010426698541728e-05, | |
| "loss": 0.6758, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7157000181311906, | |
| "learning_rate": 1.1970752481827261e-05, | |
| "loss": 0.6627, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.6454302453667989, | |
| "learning_rate": 1.1931045964720882e-05, | |
| "loss": 0.726, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.7659639276679516, | |
| "learning_rate": 1.1891307798007536e-05, | |
| "loss": 0.6917, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.6755697345701616, | |
| "learning_rate": 1.1851538632990922e-05, | |
| "loss": 0.6818, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.6654538211659489, | |
| "learning_rate": 1.1811739121482777e-05, | |
| "loss": 0.6467, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.6950161014080175, | |
| "learning_rate": 1.177190991579223e-05, | |
| "loss": 0.6743, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.6794644177066992, | |
| "learning_rate": 1.1732051668715082e-05, | |
| "loss": 0.6829, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.6348118454316969, | |
| "learning_rate": 1.1692165033523117e-05, | |
| "loss": 0.6524, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.7090401791841606, | |
| "learning_rate": 1.1652250663953415e-05, | |
| "loss": 0.733, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.6926179531524889, | |
| "learning_rate": 1.1612309214197599e-05, | |
| "loss": 0.6448, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7072241385311281, | |
| "learning_rate": 1.1572341338891145e-05, | |
| "loss": 0.6758, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.6675592242362295, | |
| "learning_rate": 1.1532347693102632e-05, | |
| "loss": 0.6936, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.6753398878253597, | |
| "learning_rate": 1.1492328932323022e-05, | |
| "loss": 0.669, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.6804988146949109, | |
| "learning_rate": 1.1452285712454905e-05, | |
| "loss": 0.6773, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7440373259704037, | |
| "learning_rate": 1.1412218689801748e-05, | |
| "loss": 0.7477, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.6646052826352247, | |
| "learning_rate": 1.1372128521057155e-05, | |
| "loss": 0.6728, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7709667240576361, | |
| "learning_rate": 1.1332015863294078e-05, | |
| "loss": 0.6343, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7165327110839048, | |
| "learning_rate": 1.1291881373954066e-05, | |
| "loss": 0.695, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7121053905605914, | |
| "learning_rate": 1.125172571083649e-05, | |
| "loss": 0.6701, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7327494114274017, | |
| "learning_rate": 1.1211549532087749e-05, | |
| "loss": 0.695, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7101126814293066, | |
| "learning_rate": 1.1171353496190499e-05, | |
| "loss": 0.7329, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.8069699735523975, | |
| "learning_rate": 1.1131138261952845e-05, | |
| "loss": 0.6599, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.8618469121997681, | |
| "learning_rate": 1.109090448849755e-05, | |
| "loss": 0.6456, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.8182088681876697, | |
| "learning_rate": 1.105065283525124e-05, | |
| "loss": 0.6772, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.6538529876543769, | |
| "learning_rate": 1.1010383961933582e-05, | |
| "loss": 0.6455, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.6448383225287607, | |
| "learning_rate": 1.0970098528546482e-05, | |
| "loss": 0.6735, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.6953896254306905, | |
| "learning_rate": 1.0929797195363259e-05, | |
| "loss": 0.7333, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.6721053785565456, | |
| "learning_rate": 1.088948062291783e-05, | |
| "loss": 0.6477, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.5904282967032243, | |
| "learning_rate": 1.0849149471993883e-05, | |
| "loss": 0.6787, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.6703922439817444, | |
| "learning_rate": 1.0808804403614044e-05, | |
| "loss": 0.6511, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.8705442920527945, | |
| "learning_rate": 1.0768446079029044e-05, | |
| "loss": 0.6733, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.8287230781701082, | |
| "learning_rate": 1.0728075159706881e-05, | |
| "loss": 0.6605, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.6189163109686591, | |
| "learning_rate": 1.0687692307321984e-05, | |
| "loss": 0.6753, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.6781933564255066, | |
| "learning_rate": 1.0647298183744359e-05, | |
| "loss": 0.7282, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7957473649289525, | |
| "learning_rate": 1.0606893451028743e-05, | |
| "loss": 0.65, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7485910605540942, | |
| "learning_rate": 1.0566478771403763e-05, | |
| "loss": 0.6776, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7327909650878064, | |
| "learning_rate": 1.0526054807261067e-05, | |
| "loss": 0.6706, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7991905466858001, | |
| "learning_rate": 1.0485622221144485e-05, | |
| "loss": 0.6758, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.6809947402682689, | |
| "learning_rate": 1.0445181675739144e-05, | |
| "loss": 0.6619, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.20009020727991, | |
| "learning_rate": 1.0404733833860639e-05, | |
| "loss": 0.688, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.903755672099354, | |
| "learning_rate": 1.0364279358444144e-05, | |
| "loss": 0.6817, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.7232572314336939, | |
| "learning_rate": 1.0323818912533561e-05, | |
| "loss": 0.656, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.7379740610126245, | |
| "learning_rate": 1.0283353159270644e-05, | |
| "loss": 0.6733, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.86964222843065, | |
| "learning_rate": 1.0242882761884132e-05, | |
| "loss": 0.6601, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.6661044375368231, | |
| "learning_rate": 1.0202408383678887e-05, | |
| "loss": 0.6819, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.8253802655023462, | |
| "learning_rate": 1.0161930688025018e-05, | |
| "loss": 0.7143, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.8015741446733053, | |
| "learning_rate": 1.012145033834699e-05, | |
| "loss": 0.6719, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.5821933682049611, | |
| "learning_rate": 1.0080967998112787e-05, | |
| "loss": 0.6811, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.7690954493881962, | |
| "learning_rate": 1.0040484330823006e-05, | |
| "loss": 0.6565, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.7315445864189613, | |
| "learning_rate": 1e-05, | |
| "loss": 0.6754, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.7386622252432524, | |
| "learning_rate": 9.959515669176997e-06, | |
| "loss": 0.6603, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.6919812903907313, | |
| "learning_rate": 9.919032001887215e-06, | |
| "loss": 0.7076, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.8006599563405511, | |
| "learning_rate": 9.878549661653013e-06, | |
| "loss": 0.6364, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.6901988727816551, | |
| "learning_rate": 9.838069311974986e-06, | |
| "loss": 0.661, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.6678547489465795, | |
| "learning_rate": 9.797591616321115e-06, | |
| "loss": 0.6624, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.7748054049097298, | |
| "learning_rate": 9.757117238115871e-06, | |
| "loss": 0.6451, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.7553281040072153, | |
| "learning_rate": 9.71664684072936e-06, | |
| "loss": 0.6778, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.641131867690177, | |
| "learning_rate": 9.676181087466444e-06, | |
| "loss": 0.6551, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7291710749634952, | |
| "learning_rate": 9.63572064155586e-06, | |
| "loss": 0.7335, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7163481736242343, | |
| "learning_rate": 9.595266166139366e-06, | |
| "loss": 0.6441, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.6572156520903942, | |
| "learning_rate": 9.55481832426086e-06, | |
| "loss": 0.6504, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.6559467876911665, | |
| "learning_rate": 9.514377778855521e-06, | |
| "loss": 0.657, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.662909871152895, | |
| "learning_rate": 9.473945192738933e-06, | |
| "loss": 0.6645, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7274810191049129, | |
| "learning_rate": 9.433521228596237e-06, | |
| "loss": 0.6467, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.648261063314425, | |
| "learning_rate": 9.393106548971257e-06, | |
| "loss": 0.7467, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.657451387169043, | |
| "learning_rate": 9.352701816255643e-06, | |
| "loss": 0.667, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7124264275789814, | |
| "learning_rate": 9.312307692678016e-06, | |
| "loss": 0.6421, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.6478844634318383, | |
| "learning_rate": 9.27192484029312e-06, | |
| "loss": 0.688, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.6450618353291538, | |
| "learning_rate": 9.231553920970958e-06, | |
| "loss": 0.6645, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.6355390990355307, | |
| "learning_rate": 9.19119559638596e-06, | |
| "loss": 0.6626, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.6654110928484159, | |
| "learning_rate": 9.150850528006118e-06, | |
| "loss": 0.7181, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.673116583781922, | |
| "learning_rate": 9.110519377082174e-06, | |
| "loss": 0.6683, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.6943712975431352, | |
| "learning_rate": 9.070202804636745e-06, | |
| "loss": 0.6355, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7727459948269628, | |
| "learning_rate": 9.02990147145352e-06, | |
| "loss": 0.665, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.6426988150098589, | |
| "learning_rate": 8.98961603806642e-06, | |
| "loss": 0.65, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.659509909598213, | |
| "learning_rate": 8.949347164748761e-06, | |
| "loss": 0.6709, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7492054794995399, | |
| "learning_rate": 8.909095511502452e-06, | |
| "loss": 0.7386, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.6811842428516481, | |
| "learning_rate": 8.868861738047158e-06, | |
| "loss": 0.6432, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.5727608447671194, | |
| "learning_rate": 8.828646503809505e-06, | |
| "loss": 0.667, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.6886018587603037, | |
| "learning_rate": 8.788450467912254e-06, | |
| "loss": 0.654, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7074868518593189, | |
| "learning_rate": 8.748274289163514e-06, | |
| "loss": 0.6586, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7334493370583429, | |
| "learning_rate": 8.708118626045939e-06, | |
| "loss": 0.657, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.8201991046253926, | |
| "learning_rate": 8.667984136705927e-06, | |
| "loss": 0.6701, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.5969030852448933, | |
| "learning_rate": 8.62787147894285e-06, | |
| "loss": 0.6988, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.6576566732974023, | |
| "learning_rate": 8.587781310198253e-06, | |
| "loss": 0.656, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.6919291582377041, | |
| "learning_rate": 8.5477142875451e-06, | |
| "loss": 0.6261, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7003569661536186, | |
| "learning_rate": 8.50767106767698e-06, | |
| "loss": 0.6501, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7744163988155506, | |
| "learning_rate": 8.46765230689737e-06, | |
| "loss": 0.6383, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.6650808234702702, | |
| "learning_rate": 8.427658661108857e-06, | |
| "loss": 0.6645, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.6583815924553357, | |
| "learning_rate": 8.387690785802403e-06, | |
| "loss": 0.6552, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.6496240712606984, | |
| "learning_rate": 8.347749336046587e-06, | |
| "loss": 0.6678, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.6920289415492432, | |
| "learning_rate": 8.307834966476885e-06, | |
| "loss": 0.6852, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.604457834334182, | |
| "learning_rate": 8.267948331284923e-06, | |
| "loss": 0.6591, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.6682155428303499, | |
| "learning_rate": 8.228090084207773e-06, | |
| "loss": 0.6561, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7355914531635479, | |
| "learning_rate": 8.188260878517224e-06, | |
| "loss": 0.6581, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.6564122035284753, | |
| "learning_rate": 8.148461367009081e-06, | |
| "loss": 0.6918, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.6538360721528242, | |
| "learning_rate": 8.108692201992466e-06, | |
| "loss": 0.6542, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.674957820323119, | |
| "learning_rate": 8.068954035279121e-06, | |
| "loss": 0.6692, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.6380977695912416, | |
| "learning_rate": 8.02924751817274e-06, | |
| "loss": 0.6391, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.587744980514578, | |
| "learning_rate": 7.989573301458274e-06, | |
| "loss": 0.6637, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.6805705907418358, | |
| "learning_rate": 7.949932035391279e-06, | |
| "loss": 0.6667, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.6270081521977198, | |
| "learning_rate": 7.91032436968725e-06, | |
| "loss": 0.7029, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.6742300289735369, | |
| "learning_rate": 7.870750953510983e-06, | |
| "loss": 0.6603, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.8924071754646948, | |
| "learning_rate": 7.831212435465925e-06, | |
| "loss": 0.6694, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.5876531309244762, | |
| "learning_rate": 7.791709463583541e-06, | |
| "loss": 0.6478, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7553792523453502, | |
| "learning_rate": 7.752242685312709e-06, | |
| "loss": 0.6335, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.5954675056029863, | |
| "learning_rate": 7.712812747509091e-06, | |
| "loss": 0.6599, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.693082993076316, | |
| "learning_rate": 7.673420296424541e-06, | |
| "loss": 0.6537, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.6374690276629207, | |
| "learning_rate": 7.63406597769651e-06, | |
| "loss": 0.702, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.6350743256053588, | |
| "learning_rate": 7.594750436337467e-06, | |
| "loss": 0.6354, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.6142273470612479, | |
| "learning_rate": 7.5554743167243135e-06, | |
| "loss": 0.6545, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7377267478799707, | |
| "learning_rate": 7.516238262587851e-06, | |
| "loss": 0.6664, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.6263530432538805, | |
| "learning_rate": 7.4770429170022e-06, | |
| "loss": 0.6484, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.6785776573913042, | |
| "learning_rate": 7.4378889223742766e-06, | |
| "loss": 0.631, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.6638092030648043, | |
| "learning_rate": 7.398776920433257e-06, | |
| "loss": 0.7161, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7290380027719847, | |
| "learning_rate": 7.35970755222007e-06, | |
| "loss": 0.6701, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7051357789907977, | |
| "learning_rate": 7.320681458076871e-06, | |
| "loss": 0.6357, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.6047614601388198, | |
| "learning_rate": 7.2816992776365714e-06, | |
| "loss": 0.6591, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7301702851880185, | |
| "learning_rate": 7.2427616498123356e-06, | |
| "loss": 0.6502, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.713717801487329, | |
| "learning_rate": 7.203869212787112e-06, | |
| "loss": 0.6391, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7065239599437015, | |
| "learning_rate": 7.165022604003187e-06, | |
| "loss": 0.7297, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.6200614878322726, | |
| "learning_rate": 7.126222460151719e-06, | |
| "loss": 0.6432, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.6539748387323382, | |
| "learning_rate": 7.08746941716232e-06, | |
| "loss": 0.6165, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.7012548229981432, | |
| "learning_rate": 7.048764110192618e-06, | |
| "loss": 0.6583, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.713792656054762, | |
| "learning_rate": 7.010107173617857e-06, | |
| "loss": 0.6277, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.6081705206738856, | |
| "learning_rate": 6.971499241020495e-06, | |
| "loss": 0.672, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.6784237349853492, | |
| "learning_rate": 6.932940945179818e-06, | |
| "loss": 0.7033, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.7241290103475653, | |
| "learning_rate": 6.894432918061579e-06, | |
| "loss": 0.669, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.5695616074610608, | |
| "learning_rate": 6.855975790807623e-06, | |
| "loss": 0.6577, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7484769455712518, | |
| "learning_rate": 6.8175701937255645e-06, | |
| "loss": 0.6501, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.6614185441303015, | |
| "learning_rate": 6.77921675627843e-06, | |
| "loss": 0.6277, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.672200149137363, | |
| "learning_rate": 6.740916107074372e-06, | |
| "loss": 0.6458, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7200106295372684, | |
| "learning_rate": 6.702668873856339e-06, | |
| "loss": 0.6614, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.6641990003364376, | |
| "learning_rate": 6.664475683491797e-06, | |
| "loss": 0.7036, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.5879289278966713, | |
| "learning_rate": 6.6263371619624615e-06, | |
| "loss": 0.6549, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7248226505864468, | |
| "learning_rate": 6.588253934354039e-06, | |
| "loss": 0.6248, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7488352629327707, | |
| "learning_rate": 6.550226624845961e-06, | |
| "loss": 0.6592, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.7119848368674065, | |
| "learning_rate": 6.5122558567011775e-06, | |
| "loss": 0.6358, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.6187967774882688, | |
| "learning_rate": 6.474342252255927e-06, | |
| "loss": 0.662, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.6447575199279856, | |
| "learning_rate": 6.43648643290955e-06, | |
| "loss": 0.6374, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.579615602622246, | |
| "learning_rate": 6.398689019114289e-06, | |
| "loss": 0.6393, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.6939331778963205, | |
| "learning_rate": 6.360950630365126e-06, | |
| "loss": 0.6488, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.6216844743810305, | |
| "learning_rate": 6.323271885189636e-06, | |
| "loss": 0.6771, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.6636839389062641, | |
| "learning_rate": 6.2856534011378365e-06, | |
| "loss": 0.6174, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.8288156576061698, | |
| "learning_rate": 6.24809579477208e-06, | |
| "loss": 0.6561, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.6360493953719707, | |
| "learning_rate": 6.210599681656933e-06, | |
| "loss": 0.6912, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.6021297745464462, | |
| "learning_rate": 6.173165676349103e-06, | |
| "loss": 0.6426, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.6152387963399792, | |
| "learning_rate": 6.135794392387353e-06, | |
| "loss": 0.6488, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.7904840795998466, | |
| "learning_rate": 6.09848644228245e-06, | |
| "loss": 0.6313, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.593113398457472, | |
| "learning_rate": 6.061242437507131e-06, | |
| "loss": 0.6536, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.6838676801534896, | |
| "learning_rate": 6.024062988486072e-06, | |
| "loss": 0.6536, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.5852968494358796, | |
| "learning_rate": 5.986948704585895e-06, | |
| "loss": 0.6833, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.6880650442925168, | |
| "learning_rate": 5.949900194105167e-06, | |
| "loss": 0.615, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.6963184620091328, | |
| "learning_rate": 5.912918064264441e-06, | |
| "loss": 0.6802, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.641163426385126, | |
| "learning_rate": 5.876002921196296e-06, | |
| "loss": 0.6252, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.6611089593608891, | |
| "learning_rate": 5.839155369935407e-06, | |
| "loss": 0.6215, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.5755346633727011, | |
| "learning_rate": 5.802376014408632e-06, | |
| "loss": 0.6653, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.689385738981433, | |
| "learning_rate": 5.765665457425102e-06, | |
| "loss": 0.6449, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.5943498129532476, | |
| "learning_rate": 5.729024300666349e-06, | |
| "loss": 0.6927, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.75720345607242, | |
| "learning_rate": 5.692453144676451e-06, | |
| "loss": 0.6395, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.6431721527289079, | |
| "learning_rate": 5.655952588852181e-06, | |
| "loss": 0.6454, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.5908943263514355, | |
| "learning_rate": 5.619523231433177e-06, | |
| "loss": 0.6473, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.7212254015768811, | |
| "learning_rate": 5.5831656694921465e-06, | |
| "loss": 0.6558, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.6081843034998314, | |
| "learning_rate": 5.546880498925079e-06, | |
| "loss": 0.6347, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.578463789906364, | |
| "learning_rate": 5.510668314441474e-06, | |
| "loss": 0.6809, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.615140245545869, | |
| "learning_rate": 5.4745297095546125e-06, | |
| "loss": 0.6416, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.6672837804788065, | |
| "learning_rate": 5.438465276571796e-06, | |
| "loss": 0.6237, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.6445792457327395, | |
| "learning_rate": 5.40247560658467e-06, | |
| "loss": 0.6536, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.6462547880358498, | |
| "learning_rate": 5.366561289459512e-06, | |
| "loss": 0.649, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.6221813752371448, | |
| "learning_rate": 5.330722913827594e-06, | |
| "loss": 0.6484, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.6205058787869572, | |
| "learning_rate": 5.2949610670755e-06, | |
| "loss": 0.7018, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.6968523348168115, | |
| "learning_rate": 5.259276335335522e-06, | |
| "loss": 0.6455, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.630885368888865, | |
| "learning_rate": 5.223669303476041e-06, | |
| "loss": 0.6319, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.6781795525853452, | |
| "learning_rate": 5.18814055509195e-06, | |
| "loss": 0.6636, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.6848858498103306, | |
| "learning_rate": 5.152690672495091e-06, | |
| "loss": 0.6428, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.6468785611746863, | |
| "learning_rate": 5.117320236704697e-06, | |
| "loss": 0.6527, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.4458831989646992, | |
| "learning_rate": 5.08202982743788e-06, | |
| "loss": 0.6941, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.6472389955425294, | |
| "learning_rate": 5.046820023100129e-06, | |
| "loss": 0.6284, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.6635680749476713, | |
| "learning_rate": 5.01169140077582e-06, | |
| "loss": 0.6492, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.6971370544968278, | |
| "learning_rate": 4.976644536218783e-06, | |
| "loss": 0.6266, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7794954579271794, | |
| "learning_rate": 4.9416800038428326e-06, | |
| "loss": 0.6448, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.6799095465030572, | |
| "learning_rate": 4.9067983767123736e-06, | |
| "loss": 0.6514, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.6532432700542071, | |
| "learning_rate": 4.872000226533001e-06, | |
| "loss": 0.6464, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.6994117712449526, | |
| "learning_rate": 4.837286123642141e-06, | |
| "loss": 0.7218, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.5870973770863565, | |
| "learning_rate": 4.802656636999693e-06, | |
| "loss": 0.6633, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.8013741958726535, | |
| "learning_rate": 4.7681123341787e-06, | |
| "loss": 0.6072, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.6129642891356658, | |
| "learning_rate": 4.733653781356055e-06, | |
| "loss": 0.6532, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7076801695534043, | |
| "learning_rate": 4.699281543303222e-06, | |
| "loss": 0.633, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.6236629866450143, | |
| "learning_rate": 4.664996183376972e-06, | |
| "loss": 0.645, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.609684517349269, | |
| "learning_rate": 4.630798263510162e-06, | |
| "loss": 0.6458, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.5795979463866703, | |
| "learning_rate": 4.596688344202509e-06, | |
| "loss": 0.6523, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.6807246005682629, | |
| "learning_rate": 4.562666984511416e-06, | |
| "loss": 0.6105, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.5524238266816994, | |
| "learning_rate": 4.528734742042803e-06, | |
| "loss": 0.6605, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.660370294991325, | |
| "learning_rate": 4.494892172941965e-06, | |
| "loss": 0.6241, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.7690865932483458, | |
| "learning_rate": 4.461139831884475e-06, | |
| "loss": 0.6614, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.6270039043389054, | |
| "learning_rate": 4.427478272067066e-06, | |
| "loss": 0.6937, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.6231361031902123, | |
| "learning_rate": 4.393908045198585e-06, | |
| "loss": 0.6427, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.5590129123246664, | |
| "learning_rate": 4.360429701490935e-06, | |
| "loss": 0.6442, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.6514974942052368, | |
| "learning_rate": 4.327043789650078e-06, | |
| "loss": 0.6377, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.748539607647128, | |
| "learning_rate": 4.2937508568670194e-06, | |
| "loss": 0.6398, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.6361382498031051, | |
| "learning_rate": 4.260551448808852e-06, | |
| "loss": 0.6659, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.5969276356602796, | |
| "learning_rate": 4.2274461096098085e-06, | |
| "loss": 0.7093, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.6703106363143092, | |
| "learning_rate": 4.194435381862343e-06, | |
| "loss": 0.6194, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.6425376870707464, | |
| "learning_rate": 4.1615198066082475e-06, | |
| "loss": 0.6556, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.591269622613119, | |
| "learning_rate": 4.12869992332977e-06, | |
| "loss": 0.6524, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7260321894324775, | |
| "learning_rate": 4.095976269940777e-06, | |
| "loss": 0.6416, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.5583461510051794, | |
| "learning_rate": 4.0633493827779425e-06, | |
| "loss": 0.6466, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7277733025288593, | |
| "learning_rate": 4.03081979659195e-06, | |
| "loss": 0.6579, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.6517350378102095, | |
| "learning_rate": 3.998388044538737e-06, | |
| "loss": 0.6547, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.7853172538242879, | |
| "learning_rate": 3.966054658170754e-06, | |
| "loss": 0.6917, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.5838898626379716, | |
| "learning_rate": 3.933820167428241e-06, | |
| "loss": 0.6378, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.6112863650448396, | |
| "learning_rate": 3.901685100630554e-06, | |
| "loss": 0.6607, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.6843097085761197, | |
| "learning_rate": 3.869649984467504e-06, | |
| "loss": 0.6462, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.5914223449212442, | |
| "learning_rate": 3.837715343990727e-06, | |
| "loss": 0.6177, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.5972948686649531, | |
| "learning_rate": 3.8058817026050676e-06, | |
| "loss": 0.7189, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.5915925626028785, | |
| "learning_rate": 3.7741495820600128e-06, | |
| "loss": 0.6428, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.7114421940143537, | |
| "learning_rate": 3.742519502441132e-06, | |
| "loss": 0.653, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.6512983655056599, | |
| "learning_rate": 3.7109919821615546e-06, | |
| "loss": 0.6562, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.5714596455670929, | |
| "learning_rate": 3.6795675379534857e-06, | |
| "loss": 0.6434, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.5773249093636545, | |
| "learning_rate": 3.6482466848597164e-06, | |
| "loss": 0.6445, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.644524398284823, | |
| "learning_rate": 3.6170299362251926e-06, | |
| "loss": 0.7094, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.6432991090373537, | |
| "learning_rate": 3.585917803688603e-06, | |
| "loss": 0.6549, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.6193127063887414, | |
| "learning_rate": 3.5549107971739905e-06, | |
| "loss": 0.645, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.6556133044492171, | |
| "learning_rate": 3.5240094248824e-06, | |
| "loss": 0.6462, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.6534604203544768, | |
| "learning_rate": 3.4932141932835362e-06, | |
| "loss": 0.6377, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.574479705465771, | |
| "learning_rate": 3.4625256071074776e-06, | |
| "loss": 0.6581, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7167915390508185, | |
| "learning_rate": 3.431944169336391e-06, | |
| "loss": 0.6998, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.5753509546170014, | |
| "learning_rate": 3.4014703811963024e-06, | |
| "loss": 0.627, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.547677827015996, | |
| "learning_rate": 3.3711047421488676e-06, | |
| "loss": 0.654, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.6849762180268518, | |
| "learning_rate": 3.3408477498831917e-06, | |
| "loss": 0.6191, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7317642874060407, | |
| "learning_rate": 3.3106999003076745e-06, | |
| "loss": 0.6112, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.5689472355475474, | |
| "learning_rate": 3.280661687541876e-06, | |
| "loss": 0.6573, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.6342221541841166, | |
| "learning_rate": 3.2507336039084315e-06, | |
| "loss": 0.6377, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.6966057591584099, | |
| "learning_rate": 3.2209161399249677e-06, | |
| "loss": 0.71, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.6590124389889497, | |
| "learning_rate": 3.1912097842960676e-06, | |
| "loss": 0.6376, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.6238127220128191, | |
| "learning_rate": 3.1616150239052647e-06, | |
| "loss": 0.6216, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.6271365515219529, | |
| "learning_rate": 3.132132343807056e-06, | |
| "loss": 0.6394, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.6831564062808797, | |
| "learning_rate": 3.1027622272189572e-06, | |
| "loss": 0.6205, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.6404577568502366, | |
| "learning_rate": 3.073505155513591e-06, | |
| "loss": 0.6269, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.6402081470277372, | |
| "learning_rate": 3.0443616082107753e-06, | |
| "loss": 0.6381, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.5989734816151734, | |
| "learning_rate": 3.015332062969685e-06, | |
| "loss": 0.637, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.695960785486385, | |
| "learning_rate": 2.9864169955810085e-06, | |
| "loss": 0.616, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.6023623426765436, | |
| "learning_rate": 2.9576168799591663e-06, | |
| "loss": 0.6553, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.5828529926677192, | |
| "learning_rate": 2.9289321881345257e-06, | |
| "loss": 0.6286, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.6808201103441773, | |
| "learning_rate": 2.900363390245674e-06, | |
| "loss": 0.6264, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.6190621367944866, | |
| "learning_rate": 2.8719109545317102e-06, | |
| "loss": 0.7047, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.5908794901355795, | |
| "learning_rate": 2.8435753473245697e-06, | |
| "loss": 0.6349, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.592429740136109, | |
| "learning_rate": 2.8153570330413925e-06, | |
| "loss": 0.6402, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.6963991041241643, | |
| "learning_rate": 2.7872564741768917e-06, | |
| "loss": 0.6456, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.5719804647487756, | |
| "learning_rate": 2.759274131295787e-06, | |
| "loss": 0.6349, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.6065837845826134, | |
| "learning_rate": 2.7314104630252502e-06, | |
| "loss": 0.6295, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.5997674456838561, | |
| "learning_rate": 2.7036659260473973e-06, | |
| "loss": 0.7026, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.6019462922224683, | |
| "learning_rate": 2.6760409750917925e-06, | |
| "loss": 0.6259, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.6341850666653158, | |
| "learning_rate": 2.648536062927999e-06, | |
| "loss": 0.6449, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 2.3001860073086675, | |
| "learning_rate": 2.6211516403581585e-06, | |
| "loss": 0.6439, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.5671409191707228, | |
| "learning_rate": 2.593888156209603e-06, | |
| "loss": 0.6429, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.6096302431352558, | |
| "learning_rate": 2.5667460573275028e-06, | |
| "loss": 0.6566, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.6524426099497527, | |
| "learning_rate": 2.5397257885675396e-06, | |
| "loss": 0.6372, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.5815091971020746, | |
| "learning_rate": 2.512827792788606e-06, | |
| "loss": 0.6302, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.6235790733836095, | |
| "learning_rate": 2.48605251084556e-06, | |
| "loss": 0.6964, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.6657262678609176, | |
| "learning_rate": 2.459400381581997e-06, | |
| "loss": 0.6425, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.5226966338040596, | |
| "learning_rate": 2.432871841823047e-06, | |
| "loss": 0.6515, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.6363516726370335, | |
| "learning_rate": 2.406467326368237e-06, | |
| "loss": 0.6308, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.6302163611631845, | |
| "learning_rate": 2.3801872679843384e-06, | |
| "loss": 0.6395, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.6404972641350611, | |
| "learning_rate": 2.3540320973982924e-06, | |
| "loss": 0.6441, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.5870759209949381, | |
| "learning_rate": 2.328002243290138e-06, | |
| "loss": 0.684, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.6477566587591898, | |
| "learning_rate": 2.3020981322860057e-06, | |
| "loss": 0.6065, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.7014352005135476, | |
| "learning_rate": 2.2763201889510987e-06, | |
| "loss": 0.6482, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.5847910108336893, | |
| "learning_rate": 2.2506688357827546e-06, | |
| "loss": 0.6379, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.8424888111482473, | |
| "learning_rate": 2.2251444932035094e-06, | |
| "loss": 0.6329, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.5889389063340384, | |
| "learning_rate": 2.1997475795542113e-06, | |
| "loss": 0.7132, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.6222285478118977, | |
| "learning_rate": 2.1744785110871713e-06, | |
| "loss": 0.6449, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.6184736487279212, | |
| "learning_rate": 2.149337701959325e-06, | |
| "loss": 0.6401, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.6721155524494571, | |
| "learning_rate": 2.124325564225458e-06, | |
| "loss": 0.6042, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7082382392952378, | |
| "learning_rate": 2.099442507831444e-06, | |
| "loss": 0.6277, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.5746323943249898, | |
| "learning_rate": 2.074688940607529e-06, | |
| "loss": 0.6534, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.6976877229331887, | |
| "learning_rate": 2.050065268261655e-06, | |
| "loss": 0.6784, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.5627102288511716, | |
| "learning_rate": 2.025571894372794e-06, | |
| "loss": 0.6289, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.6172828112978435, | |
| "learning_rate": 2.001209220384346e-06, | |
| "loss": 0.6294, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.5662594447199433, | |
| "learning_rate": 1.976977645597552e-06, | |
| "loss": 0.6436, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7074464895311687, | |
| "learning_rate": 1.9528775671649593e-06, | |
| "loss": 0.6173, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.6554966832321535, | |
| "learning_rate": 1.9289093800839067e-06, | |
| "loss": 0.6396, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.5872156699340129, | |
| "learning_rate": 1.9050734771900414e-06, | |
| "loss": 0.6371, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.6511462069083932, | |
| "learning_rate": 1.8813702491508956e-06, | |
| "loss": 0.7005, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.5779512437776096, | |
| "learning_rate": 1.8578000844594746e-06, | |
| "loss": 0.6329, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.673722716371842, | |
| "learning_rate": 1.8343633694278895e-06, | |
| "loss": 0.6256, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.5551784609278542, | |
| "learning_rate": 1.8110604881810357e-06, | |
| "loss": 0.6267, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.8001123157526435, | |
| "learning_rate": 1.7878918226502816e-06, | |
| "loss": 0.6534, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.5597566957231792, | |
| "learning_rate": 1.7648577525672195e-06, | |
| "loss": 0.6202, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.5524746395213825, | |
| "learning_rate": 1.7419586554574364e-06, | |
| "loss": 0.6055, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.6925034270245604, | |
| "learning_rate": 1.7191949066343306e-06, | |
| "loss": 0.6506, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.5982581710140573, | |
| "learning_rate": 1.69656687919296e-06, | |
| "loss": 0.6036, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.625640893394704, | |
| "learning_rate": 1.6740749440039262e-06, | |
| "loss": 0.6613, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.5791710291449589, | |
| "learning_rate": 1.6517194697072903e-06, | |
| "loss": 0.6189, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.6645440340948221, | |
| "learning_rate": 1.6295008227065367e-06, | |
| "loss": 0.6144, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.668105235928013, | |
| "learning_rate": 1.607419367162577e-06, | |
| "loss": 0.7108, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.5385899476858674, | |
| "learning_rate": 1.58547546498776e-06, | |
| "loss": 0.6441, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6843312572641347, | |
| "learning_rate": 1.5636694758399563e-06, | |
| "loss": 0.6146, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6517691074145857, | |
| "learning_rate": 1.542001757116658e-06, | |
| "loss": 0.6446, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6683044014030128, | |
| "learning_rate": 1.520472663949122e-06, | |
| "loss": 0.6259, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.5529928661133248, | |
| "learning_rate": 1.4990825491965522e-06, | |
| "loss": 0.6574, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6354267241399787, | |
| "learning_rate": 1.4778317634403082e-06, | |
| "loss": 0.6859, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6340161097980351, | |
| "learning_rate": 1.4567206549781699e-06, | |
| "loss": 0.6336, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6857637368517085, | |
| "learning_rate": 1.4357495698186186e-06, | |
| "loss": 0.6634, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6987918825152238, | |
| "learning_rate": 1.41491885167517e-06, | |
| "loss": 0.631, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.5905065070051041, | |
| "learning_rate": 1.3942288419607476e-06, | |
| "loss": 0.6318, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.5891202117275015, | |
| "learning_rate": 1.3736798797820783e-06, | |
| "loss": 0.6257, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.6389989609910032, | |
| "learning_rate": 1.3532723019341376e-06, | |
| "loss": 0.633, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.7481668062509925, | |
| "learning_rate": 1.3330064428946255e-06, | |
| "loss": 0.621, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.6201805967439965, | |
| "learning_rate": 1.3128826348184886e-06, | |
| "loss": 0.6891, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.711558886670601, | |
| "learning_rate": 1.2929012075324832e-06, | |
| "loss": 0.6331, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.5601641885965722, | |
| "learning_rate": 1.2730624885297537e-06, | |
| "loss": 0.6402, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.7003049669889405, | |
| "learning_rate": 1.2533668029644751e-06, | |
| "loss": 0.6392, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.6512766895573538, | |
| "learning_rate": 1.233814473646524e-06, | |
| "loss": 0.6435, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.6304938936760185, | |
| "learning_rate": 1.214405821036182e-06, | |
| "loss": 0.6402, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.6453066742327469, | |
| "learning_rate": 1.195141163238892e-06, | |
| "loss": 0.6906, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.7791569651001938, | |
| "learning_rate": 1.1760208160000364e-06, | |
| "loss": 0.6327, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.6242645456514428, | |
| "learning_rate": 1.1570450926997657e-06, | |
| "loss": 0.6259, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.5852932502584727, | |
| "learning_rate": 1.1382143043478599e-06, | |
| "loss": 0.6558, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.7187172398967998, | |
| "learning_rate": 1.1195287595786352e-06, | |
| "loss": 0.633, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.571420842271662, | |
| "learning_rate": 1.1009887646458862e-06, | |
| "loss": 0.6893, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.6286607515904005, | |
| "learning_rate": 1.0825946234178575e-06, | |
| "loss": 0.6251, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.5889490025349474, | |
| "learning_rate": 1.064346637372271e-06, | |
| "loss": 0.6456, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.6780378250552338, | |
| "learning_rate": 1.0462451055913847e-06, | |
| "loss": 0.6158, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.6488064822153722, | |
| "learning_rate": 1.0282903247570908e-06, | |
| "loss": 0.6014, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.5968658741723634, | |
| "learning_rate": 1.010482589146048e-06, | |
| "loss": 0.6446, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.6634490640695445, | |
| "learning_rate": 9.928221906248614e-07, | |
| "loss": 0.6842, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.6651655353815971, | |
| "learning_rate": 9.753094186453028e-07, | |
| "loss": 0.6141, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.5581435148846154, | |
| "learning_rate": 9.579445602395577e-07, | |
| "loss": 0.632, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.6312888479285492, | |
| "learning_rate": 9.407279000155311e-07, | |
| "loss": 0.6499, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.6905760011351594, | |
| "learning_rate": 9.23659720152179e-07, | |
| "loss": 0.619, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.6758689934885078, | |
| "learning_rate": 9.067403003948783e-07, | |
| "loss": 0.6512, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.5580070435331436, | |
| "learning_rate": 8.89969918050847e-07, | |
| "loss": 0.6426, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.6788609323246715, | |
| "learning_rate": 8.733488479845997e-07, | |
| "loss": 0.6926, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.7711818911159529, | |
| "learning_rate": 8.568773626134363e-07, | |
| "loss": 0.6431, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.5264819335472163, | |
| "learning_rate": 8.405557319029911e-07, | |
| "loss": 0.6346, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.6539755754682118, | |
| "learning_rate": 8.243842233627897e-07, | |
| "loss": 0.6088, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.6276884828047339, | |
| "learning_rate": 8.083631020418792e-07, | |
| "loss": 0.6392, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.6242556779335853, | |
| "learning_rate": 7.924926305244729e-07, | |
| "loss": 0.6303, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.6450261398029767, | |
| "learning_rate": 7.767730689256614e-07, | |
| "loss": 0.655, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.704079501490032, | |
| "learning_rate": 7.612046748871327e-07, | |
| "loss": 0.627, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.6396858617866267, | |
| "learning_rate": 7.457877035729588e-07, | |
| "loss": 0.6174, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.6287790509868376, | |
| "learning_rate": 7.305224076654127e-07, | |
| "loss": 0.6635, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.873048359207114, | |
| "learning_rate": 7.154090373608236e-07, | |
| "loss": 0.6388, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.5881138487876106, | |
| "learning_rate": 7.004478403654835e-07, | |
| "loss": 0.6062, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.657388597051673, | |
| "learning_rate": 6.856390618915775e-07, | |
| "loss": 0.6897, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.5485546716949598, | |
| "learning_rate": 6.709829446531734e-07, | |
| "loss": 0.6541, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.6699766341713999, | |
| "learning_rate": 6.564797288622371e-07, | |
| "loss": 0.6084, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.6040708813799974, | |
| "learning_rate": 6.421296522247012e-07, | |
| "loss": 0.6341, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.7310239374931192, | |
| "learning_rate": 6.279329499365649e-07, | |
| "loss": 0.6353, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.5800753989859047, | |
| "learning_rate": 6.138898546800398e-07, | |
| "loss": 0.6628, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.6185190468285704, | |
| "learning_rate": 6.000005966197387e-07, | |
| "loss": 0.6831, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.6615470484183518, | |
| "learning_rate": 5.86265403398899e-07, | |
| "loss": 0.6292, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.6671132213915263, | |
| "learning_rate": 5.726845001356573e-07, | |
| "loss": 0.615, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.710456836405862, | |
| "learning_rate": 5.592581094193584e-07, | |
| "loss": 0.6159, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.6052691264538014, | |
| "learning_rate": 5.459864513068991e-07, | |
| "loss": 0.6399, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.6425766862473259, | |
| "learning_rate": 5.328697433191321e-07, | |
| "loss": 0.6202, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.581802218379288, | |
| "learning_rate": 5.199082004372958e-07, | |
| "loss": 0.6338, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.7207535136328385, | |
| "learning_rate": 5.071020350994893e-07, | |
| "loss": 0.6336, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.606403956290431, | |
| "learning_rate": 4.944514571971981e-07, | |
| "loss": 0.6877, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.6890047165080336, | |
| "learning_rate": 4.81956674071844e-07, | |
| "loss": 0.6092, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.5361959776088648, | |
| "learning_rate": 4.696178905113913e-07, | |
| "loss": 0.6479, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.7361321178970542, | |
| "learning_rate": 4.5743530874699293e-07, | |
| "loss": 0.6283, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.6062549561428903, | |
| "learning_rate": 4.454091284496731e-07, | |
| "loss": 0.6327, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.6339383101378874, | |
| "learning_rate": 4.3353954672705533e-07, | |
| "loss": 0.6585, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.6510280022721718, | |
| "learning_rate": 4.218267581201296e-07, | |
| "loss": 0.673, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.6668925651315517, | |
| "learning_rate": 4.1027095460006715e-07, | |
| "loss": 0.6171, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.6387870452605278, | |
| "learning_rate": 3.988723255650728e-07, | |
| "loss": 0.6285, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.5570417151964138, | |
| "learning_rate": 3.876310578372833e-07, | |
| "loss": 0.6512, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.6804985201784207, | |
| "learning_rate": 3.7654733565969826e-07, | |
| "loss": 0.6208, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.6007759280722736, | |
| "learning_rate": 3.6562134069316857e-07, | |
| "loss": 0.6899, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.6069630554507705, | |
| "learning_rate": 3.548532520134129e-07, | |
| "loss": 0.6417, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.6020819730356862, | |
| "learning_rate": 3.442432461080858e-07, | |
| "loss": 0.6345, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.7599975053661953, | |
| "learning_rate": 3.3379149687388866e-07, | |
| "loss": 0.6241, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.7005319833124528, | |
| "learning_rate": 3.23498175613709e-07, | |
| "loss": 0.6116, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.6416299446222633, | |
| "learning_rate": 3.133634510338235e-07, | |
| "loss": 0.6489, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.5834401502468652, | |
| "learning_rate": 3.0338748924112483e-07, | |
| "loss": 0.6843, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.6922611041243157, | |
| "learning_rate": 2.935704537404083e-07, | |
| "loss": 0.6064, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.5686374086674281, | |
| "learning_rate": 2.839125054316838e-07, | |
| "loss": 0.6552, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.6062949563577924, | |
| "learning_rate": 2.744138026075405e-07, | |
| "loss": 0.647, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.6711008292078926, | |
| "learning_rate": 2.650745009505562e-07, | |
| "loss": 0.6079, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.6786978494337168, | |
| "learning_rate": 2.5589475353073987e-07, | |
| "loss": 0.6488, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.6081067481820261, | |
| "learning_rate": 2.468747108030289e-07, | |
| "loss": 0.6511, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.5725020341839012, | |
| "learning_rate": 2.380145206048201e-07, | |
| "loss": 0.6755, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.6861814398943483, | |
| "learning_rate": 2.2931432815354593e-07, | |
| "loss": 0.6373, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.6170554804449965, | |
| "learning_rate": 2.2077427604429435e-07, | |
| "loss": 0.6493, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.6825177064680187, | |
| "learning_rate": 2.123945042474751e-07, | |
| "loss": 0.6381, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.6226675880293764, | |
| "learning_rate": 2.0417515010652032e-07, | |
| "loss": 0.645, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.8098209287494129, | |
| "learning_rate": 1.9611634833564096e-07, | |
| "loss": 0.6268, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.5771913591950079, | |
| "learning_rate": 1.8821823101760949e-07, | |
| "loss": 0.6437, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.6693386449117495, | |
| "learning_rate": 1.8048092760160286e-07, | |
| "loss": 0.626, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.6563341557663969, | |
| "learning_rate": 1.7290456490107522e-07, | |
| "loss": 0.6453, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.6415636562126644, | |
| "learning_rate": 1.6548926709168634e-07, | |
| "loss": 0.6428, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.8492239870521986, | |
| "learning_rate": 1.5823515570925763e-07, | |
| "loss": 0.6353, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.6107662239821435, | |
| "learning_rate": 1.5114234964778707e-07, | |
| "loss": 0.6306, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.6807822568754512, | |
| "learning_rate": 1.4421096515749855e-07, | |
| "loss": 0.7021, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.5597939216807377, | |
| "learning_rate": 1.374411158429323e-07, | |
| "loss": 0.6446, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.7545145990379868, | |
| "learning_rate": 1.30832912661093e-07, | |
| "loss": 0.5991, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.6443720022531878, | |
| "learning_rate": 1.243864639196213e-07, | |
| "loss": 0.6401, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.6735630161306276, | |
| "learning_rate": 1.1810187527502182e-07, | |
| "loss": 0.6243, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.5527995931889649, | |
| "learning_rate": 1.1197924973093466e-07, | |
| "loss": 0.656, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.6954231846383898, | |
| "learning_rate": 1.0601868763643997e-07, | |
| "loss": 0.6995, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.5794149258910508, | |
| "learning_rate": 1.0022028668442374e-07, | |
| "loss": 0.6525, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.6763883420071272, | |
| "learning_rate": 9.45841419099669e-08, | |
| "loss": 0.6134, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.6163957803822115, | |
| "learning_rate": 8.911034568879207e-08, | |
| "loss": 0.6463, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.6872128172184772, | |
| "learning_rate": 8.379898773574924e-08, | |
| "loss": 0.6272, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.6969807884085863, | |
| "learning_rate": 7.865015510334473e-08, | |
| "loss": 0.6154, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.6067392208771799, | |
| "learning_rate": 7.366393218031564e-08, | |
| "loss": 0.6548, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.7326568974910563, | |
| "learning_rate": 6.884040069024434e-08, | |
| "loss": 0.6139, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.6271685352214629, | |
| "learning_rate": 6.417963969022389e-08, | |
| "loss": 0.6984, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.7231828207260607, | |
| "learning_rate": 5.968172556955365e-08, | |
| "loss": 0.6098, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.6886787137949122, | |
| "learning_rate": 5.534673204849572e-08, | |
| "loss": 0.6389, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.6968668645848208, | |
| "learning_rate": 5.1174730177064866e-08, | |
| "loss": 0.6127, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.5940420732274541, | |
| "learning_rate": 4.716578833386054e-08, | |
| "loss": 0.6101, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.5916204706340398, | |
| "learning_rate": 4.331997222494777e-08, | |
| "loss": 0.6435, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.6596010087216369, | |
| "learning_rate": 3.963734488278248e-08, | |
| "loss": 0.7019, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.689525877668857, | |
| "learning_rate": 3.6117966665175644e-08, | |
| "loss": 0.6174, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.7061654652231126, | |
| "learning_rate": 3.2761895254306285e-08, | |
| "loss": 0.6326, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.5320778827729019, | |
| "learning_rate": 2.9569185655773382e-08, | |
| "loss": 0.6332, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.6484502555965168, | |
| "learning_rate": 2.6539890197695428e-08, | |
| "loss": 0.6075, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.5932090460175373, | |
| "learning_rate": 2.3674058529855603e-08, | |
| "loss": 0.7004, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.6351269850447796, | |
| "learning_rate": 2.0971737622883515e-08, | |
| "loss": 0.6203, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.6469454549653973, | |
| "learning_rate": 1.8432971767488038e-08, | |
| "loss": 0.6311, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.730675850136464, | |
| "learning_rate": 1.605780257373124e-08, | |
| "loss": 0.6379, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.8638624065133594, | |
| "learning_rate": 1.3846268970344467e-08, | |
| "loss": 0.6049, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.6053294417590919, | |
| "learning_rate": 1.179840720409331e-08, | |
| "loss": 0.6473, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.6000098616667142, | |
| "learning_rate": 9.914250839180296e-09, | |
| "loss": 0.6451, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.7050234381418286, | |
| "learning_rate": 8.193830756699773e-09, | |
| "loss": 0.6587, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.5633463982321911, | |
| "learning_rate": 6.6371751541249865e-09, | |
| "loss": 0.6258, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.5930904755005525, | |
| "learning_rate": 5.2443095448506674e-09, | |
| "loss": 0.6397, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.6519111229301998, | |
| "learning_rate": 4.015256757774477e-09, | |
| "loss": 0.6157, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.6669790713557369, | |
| "learning_rate": 2.9500369369195313e-09, | |
| "loss": 0.6331, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.6697417881781613, | |
| "learning_rate": 2.0486675411102165e-09, | |
| "loss": 0.6671, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.6523758350111415, | |
| "learning_rate": 1.3111633436779792e-09, | |
| "loss": 0.6909, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.6926259144477184, | |
| "learning_rate": 7.375364322292911e-10, | |
| "loss": 0.6272, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.542270719932054, | |
| "learning_rate": 3.277962084369257e-10, | |
| "loss": 0.662, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.6419827339376735, | |
| "learning_rate": 8.19493878945199e-11, | |
| "loss": 0.5797, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.637833002177059, | |
| "learning_rate": 0.0, | |
| "loss": 0.6405, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 801, | |
| "total_flos": 0.0, | |
| "train_loss": 0.032679110430599595, | |
| "train_runtime": 10037.2401, | |
| "train_samples_per_second": 164.437, | |
| "train_steps_per_second": 0.08 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 801, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": false, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
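
The state above is the standard `trainer_state.json` layout, so the per-step entries under `log_history` can be pulled out directly. Below is a minimal sketch (not part of the original log) of how one might load it and plot the loss and learning-rate curves; the filename `trainer_state.json` and the use of matplotlib are assumptions for illustration, not something recorded in the log itself.

```python
# Sketch only: read the trainer state shown above and plot loss / learning rate
# per step. Assumes the JSON is saved as "trainer_state.json" in the current
# directory and that matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step entries that carry a "loss" value; the final entry in
# log_history holds aggregate statistics (train_loss, train_runtime, ...) instead.
records = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in records]
losses = [e["loss"] for e in records]
lrs = [e["learning_rate"] for e in records]

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")
fig.tight_layout()
plt.show()
```
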