{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8600301010535368,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 7.163323782234957e-07,
      "loss": 2.0323,
      "step": 10,
      "train_LM": 0.9931640625
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.4326647564469915e-06,
      "loss": 2.0619,
      "step": 20,
      "train_LM": 1.0081787109375
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.1489971346704872e-06,
      "loss": 2.0231,
      "step": 30,
      "train_LM": 0.9273681640625
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.865329512893983e-06,
      "loss": 1.9714,
      "step": 40,
      "train_LM": 1.0142822265625
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.5816618911174783e-06,
      "loss": 1.8703,
      "step": 50,
      "train_LM": 0.8978271484375
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.2979942693409744e-06,
      "loss": 1.7548,
      "step": 60,
      "train_LM": 0.8167724609375
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.01432664756447e-06,
      "loss": 1.6451,
      "step": 70,
      "train_LM": 0.801025390625
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.730659025787966e-06,
      "loss": 1.5975,
      "step": 80,
      "train_LM": 0.773193359375
    },
    {
      "epoch": 0.04,
      "learning_rate": 6.446991404011461e-06,
      "loss": 1.5418,
      "step": 90,
      "train_LM": 0.7550048828125
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.1633237822349565e-06,
      "loss": 1.519,
      "step": 100,
      "train_LM": 0.7457275390625
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.879656160458452e-06,
      "loss": 1.4836,
      "step": 110,
      "train_LM": 0.7720947265625
    },
    {
      "epoch": 0.05,
      "learning_rate": 8.595988538681949e-06,
      "loss": 1.4816,
      "step": 120,
      "train_LM": 0.7650146484375
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.312320916905444e-06,
      "loss": 1.4378,
      "step": 130,
      "train_LM": 0.6661376953125
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.002865329512894e-05,
      "loss": 1.4387,
      "step": 140,
      "train_LM": 0.7376708984375
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.0744985673352435e-05,
      "loss": 1.4177,
      "step": 150,
      "train_LM": 0.724609375
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.1461318051575932e-05,
      "loss": 1.3805,
      "step": 160,
      "train_LM": 0.70050048828125
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.2177650429799429e-05,
      "loss": 1.3812,
      "step": 170,
      "train_LM": 0.7005615234375
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.2893982808022922e-05,
      "loss": 1.3465,
      "step": 180,
      "train_LM": 0.6685791015625
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.361031518624642e-05,
      "loss": 1.3491,
      "step": 190,
      "train_LM": 0.677490234375
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.4326647564469913e-05,
      "loss": 1.3211,
      "step": 200,
      "train_LM": 0.664306640625
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.5042979942693412e-05,
      "loss": 1.2896,
      "step": 210,
      "train_LM": 0.65985107421875
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.5759312320916904e-05,
      "loss": 1.2866,
      "step": 220,
      "train_LM": 0.65362548828125
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.64756446991404e-05,
      "loss": 1.2578,
      "step": 230,
      "train_LM": 0.5946044921875
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.7191977077363898e-05,
      "loss": 1.2582,
      "step": 240,
      "train_LM": 0.61859130859375
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.7908309455587395e-05,
      "loss": 1.2398,
      "step": 250,
      "train_LM": 0.613525390625
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.862464183381089e-05,
      "loss": 1.2373,
      "step": 260,
      "train_LM": 0.6136474609375
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9340974212034385e-05,
      "loss": 1.2273,
      "step": 270,
      "train_LM": 0.61920166015625
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.005730659025788e-05,
      "loss": 1.226,
      "step": 280,
      "train_LM": 0.62518310546875
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.0773638968481376e-05,
      "loss": 1.2234,
      "step": 290,
      "train_LM": 0.6243896484375
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.148997134670487e-05,
      "loss": 1.2205,
      "step": 300,
      "train_LM": 0.5906982421875
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.2206303724928367e-05,
      "loss": 1.2122,
      "step": 310,
      "train_LM": 0.61639404296875
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.2922636103151864e-05,
      "loss": 1.203,
      "step": 320,
      "train_LM": 0.5921630859375
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.363896848137536e-05,
      "loss": 1.2039,
      "step": 330,
      "train_LM": 0.60321044921875
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.4355300859598858e-05,
      "loss": 1.1911,
      "step": 340,
      "train_LM": 0.60760498046875
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.507163323782235e-05,
      "loss": 1.1849,
      "step": 350,
      "train_LM": 0.6004638671875
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.5787965616045845e-05,
      "loss": 1.1938,
      "step": 360,
      "train_LM": 0.6033935546875
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.6504297994269345e-05,
      "loss": 1.1869,
      "step": 370,
      "train_LM": 0.5823974609375
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.722063037249284e-05,
      "loss": 1.1832,
      "step": 380,
      "train_LM": 0.58441162109375
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.7936962750716332e-05,
      "loss": 1.1731,
      "step": 390,
      "train_LM": 0.5772705078125
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.8653295128939826e-05,
      "loss": 1.1723,
      "step": 400,
      "train_LM": 0.58673095703125
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.9369627507163327e-05,
      "loss": 1.1723,
      "step": 410,
      "train_LM": 0.5994873046875
    },
    {
      "epoch": 0.18,
      "learning_rate": 3.0085959885386824e-05,
      "loss": 1.1728,
      "step": 420,
      "train_LM": 0.57855224609375
    },
    {
      "epoch": 0.18,
      "learning_rate": 3.0802292263610314e-05,
      "loss": 1.165,
      "step": 430,
      "train_LM": 0.5885009765625
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.151862464183381e-05,
      "loss": 1.1695,
      "step": 440,
      "train_LM": 0.57763671875
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.223495702005731e-05,
      "loss": 1.1596,
      "step": 450,
      "train_LM": 0.56146240234375
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.29512893982808e-05,
      "loss": 1.1488,
      "step": 460,
      "train_LM": 0.57391357421875
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.36676217765043e-05,
      "loss": 1.1643,
      "step": 470,
      "train_LM": 0.56817626953125
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.4383954154727795e-05,
      "loss": 1.1438,
      "step": 480,
      "train_LM": 0.56756591796875
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.5100286532951296e-05,
      "loss": 1.1518,
      "step": 490,
      "train_LM": 0.57366943359375
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.581661891117479e-05,
      "loss": 1.1613,
      "step": 500,
      "train_LM": 0.59564208984375
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.653295128939828e-05,
      "loss": 1.1456,
      "step": 510,
      "train_LM": 0.5662841796875
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.724928366762178e-05,
      "loss": 1.1426,
      "step": 520,
      "train_LM": 0.5601806640625
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.796561604584528e-05,
      "loss": 1.1473,
      "step": 530,
      "train_LM": 0.5787353515625
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.868194842406877e-05,
      "loss": 1.1421,
      "step": 540,
      "train_LM": 0.574462890625
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.9398280802292264e-05,
      "loss": 1.1396,
      "step": 550,
      "train_LM": 0.57293701171875
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.011461318051576e-05,
      "loss": 1.1366,
      "step": 560,
      "train_LM": 0.576904296875
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.083094555873926e-05,
      "loss": 1.1338,
      "step": 570,
      "train_LM": 0.5560302734375
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.154727793696275e-05,
      "loss": 1.1359,
      "step": 580,
      "train_LM": 0.5733642578125
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.2263610315186246e-05,
      "loss": 1.1333,
      "step": 590,
      "train_LM": 0.57745361328125
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.297994269340974e-05,
      "loss": 1.1396,
      "step": 600,
      "train_LM": 0.5357666015625
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.369627507163324e-05,
      "loss": 1.1312,
      "step": 610,
      "train_LM": 0.58441162109375
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.441260744985673e-05,
      "loss": 1.137,
      "step": 620,
      "train_LM": 0.56988525390625
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.512893982808023e-05,
      "loss": 1.1236,
      "step": 630,
      "train_LM": 0.5614013671875
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.584527220630373e-05,
      "loss": 1.111,
      "step": 640,
      "train_LM": 0.5447998046875
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.656160458452722e-05,
      "loss": 1.1196,
      "step": 650,
      "train_LM": 0.5562744140625
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.727793696275072e-05,
      "loss": 1.124,
      "step": 660,
      "train_LM": 0.55029296875
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.7994269340974215e-05,
      "loss": 1.1239,
      "step": 670,
      "train_LM": 0.55340576171875
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.8710601719197715e-05,
      "loss": 1.1119,
      "step": 680,
      "train_LM": 0.54693603515625
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.942693409742121e-05,
      "loss": 1.119,
      "step": 690,
      "train_LM": 0.5472412109375
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.9999987475354104e-05,
      "loss": 1.11,
      "step": 700,
      "train_LM": 0.550048828125
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.9999549114065355e-05,
      "loss": 1.1024,
      "step": 710,
      "train_LM": 0.531005859375
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.999848453303098e-05,
      "loss": 1.1101,
      "step": 720,
      "train_LM": 0.5552978515625
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.9996793758917936e-05,
      "loss": 1.0939,
      "step": 730,
      "train_LM": 0.5531005859375
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.999447683407884e-05,
      "loss": 1.1049,
      "step": 740,
      "train_LM": 0.5413818359375
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.9991533816550875e-05,
      "loss": 1.1092,
      "step": 750,
      "train_LM": 0.5496826171875
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.998796478005443e-05,
      "loss": 1.1103,
      "step": 760,
      "train_LM": 0.5521240234375
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.998376981399114e-05,
      "loss": 1.0911,
      "step": 770,
      "train_LM": 0.52960205078125
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.9978949023441736e-05,
      "loss": 1.1009,
      "step": 780,
      "train_LM": 0.54736328125
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.997350252916335e-05,
      "loss": 1.1047,
      "step": 790,
      "train_LM": 0.566162109375
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.9967430467586555e-05,
      "loss": 1.1017,
      "step": 800,
      "train_LM": 0.55645751953125
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.9960732990811876e-05,
      "loss": 1.0986,
      "step": 810,
      "train_LM": 0.54345703125
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.995341026660603e-05,
      "loss": 1.0976,
      "step": 820,
      "train_LM": 0.5579833984375
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.994546247839769e-05,
      "loss": 1.1027,
      "step": 830,
      "train_LM": 0.53839111328125
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.9936889825272945e-05,
      "loss": 1.0909,
      "step": 840,
      "train_LM": 0.53472900390625
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.9927692521970235e-05,
      "loss": 1.0878,
      "step": 850,
      "train_LM": 0.5250244140625
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.9917870798875055e-05,
      "loss": 1.0945,
      "step": 860,
      "train_LM": 0.54498291015625
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.99074249020141e-05,
      "loss": 1.0837,
      "step": 870,
      "train_LM": 0.54376220703125
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.9896355093049174e-05,
      "loss": 1.094,
      "step": 880,
      "train_LM": 0.558349609375
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.9884661649270595e-05,
      "loss": 1.0913,
      "step": 890,
      "train_LM": 0.5499267578125
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.987234486359025e-05,
      "loss": 1.0946,
      "step": 900,
      "train_LM": 0.5506591796875
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.98594050445343e-05,
      "loss": 1.0881,
      "step": 910,
      "train_LM": 0.531005859375
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.984584251623539e-05,
      "loss": 1.0968,
      "step": 920,
      "train_LM": 0.55133056640625
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.983165761842456e-05,
      "loss": 1.087,
      "step": 930,
      "train_LM": 0.5438232421875
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.981685070642274e-05,
      "loss": 1.0843,
      "step": 940,
      "train_LM": 0.55621337890625
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.980142215113186e-05,
      "loss": 1.0778,
      "step": 950,
      "train_LM": 0.54248046875
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.978537233902553e-05,
      "loss": 1.0744,
      "step": 960,
      "train_LM": 0.53277587890625
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.976870167213935e-05,
      "loss": 1.078,
      "step": 970,
      "train_LM": 0.54443359375
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.9751410568060905e-05,
      "loss": 1.0804,
      "step": 980,
      "train_LM": 0.54437255859375
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.973349945991923e-05,
      "loss": 1.089,
      "step": 990,
      "train_LM": 0.53656005859375
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.971496879637401e-05,
      "loss": 1.0913,
      "step": 1000,
      "train_LM": 0.54193115234375
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.9695819041604285e-05,
      "loss": 1.0801,
      "step": 1010,
      "train_LM": 0.52960205078125
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.967605067529692e-05,
      "loss": 1.0743,
      "step": 1020,
      "train_LM": 0.51727294921875
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.9655664192634484e-05,
      "loss": 1.079,
      "step": 1030,
      "train_LM": 0.5628662109375
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.963466010428291e-05,
      "loss": 1.08,
      "step": 1040,
      "train_LM": 0.53753662109375
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.961303893637867e-05,
      "loss": 1.069,
      "step": 1050,
      "train_LM": 0.56597900390625
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.9590801230515635e-05,
      "loss": 1.0711,
      "step": 1060,
      "train_LM": 0.53118896484375
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.956794754373148e-05,
      "loss": 1.0853,
      "step": 1070,
      "train_LM": 0.540771484375
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.954447844849371e-05,
      "loss": 1.0756,
      "step": 1080,
      "train_LM": 0.5364990234375
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.9520394532685364e-05,
      "loss": 1.0706,
      "step": 1090,
      "train_LM": 0.5113525390625
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.949569639959028e-05,
      "loss": 1.0856,
      "step": 1100,
      "train_LM": 0.54327392578125
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.9470384667877947e-05,
      "loss": 1.0617,
      "step": 1110,
      "train_LM": 0.5179443359375
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.944445997158805e-05,
      "loss": 1.0779,
      "step": 1120,
      "train_LM": 0.5509033203125
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.9417922960114583e-05,
      "loss": 1.0668,
      "step": 1130,
      "train_LM": 0.538330078125
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.9390774298189544e-05,
      "loss": 1.0653,
      "step": 1140,
      "train_LM": 0.52734375
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.936301466586633e-05,
      "loss": 1.0694,
      "step": 1150,
      "train_LM": 0.51678466796875
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.933464475850267e-05,
      "loss": 1.0747,
      "step": 1160,
      "train_LM": 0.5343017578125
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.930566528674323e-05,
      "loss": 1.0688,
      "step": 1170,
      "train_LM": 0.5302734375
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.927607697650182e-05,
      "loss": 1.0663,
      "step": 1180,
      "train_LM": 0.51226806640625
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.9245880568943134e-05,
      "loss": 1.0716,
      "step": 1190,
      "train_LM": 0.5389404296875
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.921507682046432e-05,
      "loss": 1.0654,
      "step": 1200,
      "train_LM": 0.53076171875
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.9183666502675885e-05,
      "loss": 1.0718,
      "step": 1210,
      "train_LM": 0.521728515625
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.915165040238249e-05,
      "loss": 1.0618,
      "step": 1220,
      "train_LM": 0.53070068359375
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.911902932156315e-05,
      "loss": 1.0549,
      "step": 1230,
      "train_LM": 0.5350341796875
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.9085804077351206e-05,
      "loss": 1.0609,
      "step": 1240,
      "train_LM": 0.4913330078125
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.905197550201384e-05,
      "loss": 1.0628,
      "step": 1250,
      "train_LM": 0.5445556640625
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.901754444293118e-05,
      "loss": 1.0682,
      "step": 1260,
      "train_LM": 0.521484375
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.898251176257517e-05,
      "loss": 1.0526,
      "step": 1270,
      "train_LM": 0.51763916015625
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.894687833848787e-05,
      "loss": 1.0571,
      "step": 1280,
      "train_LM": 0.52374267578125
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.891064506325953e-05,
      "loss": 1.0637,
      "step": 1290,
      "train_LM": 0.55963134765625
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.887381284450622e-05,
      "loss": 1.0653,
      "step": 1300,
      "train_LM": 0.522216796875
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.883638260484709e-05,
      "loss": 1.0611,
      "step": 1310,
      "train_LM": 0.5406494140625
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.8798355281881235e-05,
      "loss": 1.068,
      "step": 1320,
      "train_LM": 0.5301513671875
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.8759731828164284e-05,
      "loss": 1.0563,
      "step": 1330,
      "train_LM": 0.52178955078125
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.872051321118444e-05,
      "loss": 1.0508,
      "step": 1340,
      "train_LM": 0.5125732421875
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.868070041333833e-05,
      "loss": 1.0569,
      "step": 1350,
      "train_LM": 0.53240966796875
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.864029443190633e-05,
      "loss": 1.0549,
      "step": 1360,
      "train_LM": 0.5390625
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.859929627902765e-05,
      "loss": 1.0572,
      "step": 1370,
      "train_LM": 0.536376953125
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.8557706981674906e-05,
      "loss": 1.0526,
      "step": 1380,
      "train_LM": 0.52813720703125
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.851552758162847e-05,
      "loss": 1.0554,
      "step": 1390,
      "train_LM": 0.5244140625
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.847275913545032e-05,
      "loss": 1.057,
      "step": 1400,
      "train_LM": 0.54168701171875
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.8429402714457586e-05,
      "loss": 1.0611,
      "step": 1410,
      "train_LM": 0.58660888671875
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.8385459404695755e-05,
      "loss": 1.0476,
      "step": 1420,
      "train_LM": 0.52166748046875
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.834093030691139e-05,
      "loss": 1.0513,
      "step": 1430,
      "train_LM": 0.509033203125
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.829581653652463e-05,
      "loss": 1.0501,
      "step": 1440,
      "train_LM": 0.517578125
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.82501192236012e-05,
      "loss": 1.0573,
      "step": 1450,
      "train_LM": 0.55438232421875
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.8203839512824145e-05,
      "loss": 1.0544,
      "step": 1460,
      "train_LM": 0.5068359375
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.8156978563465114e-05,
      "loss": 1.0505,
      "step": 1470,
      "train_LM": 0.5374755859375
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.8109537549355346e-05,
      "loss": 1.0478,
      "step": 1480,
      "train_LM": 0.5125732421875
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.806151765885627e-05,
      "loss": 1.0509,
      "step": 1490,
      "train_LM": 0.51953125
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.801292009482972e-05,
      "loss": 1.0453,
      "step": 1500,
      "train_LM": 0.54364013671875
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.796374607460782e-05,
      "loss": 1.0535,
      "step": 1510,
      "train_LM": 0.5399169921875
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.7913996829962494e-05,
      "loss": 1.0352,
      "step": 1520,
      "train_LM": 0.52886962890625
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.786367360707458e-05,
      "loss": 1.0477,
      "step": 1530,
      "train_LM": 0.5167236328125
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.7812777666502634e-05,
      "loss": 1.0496,
      "step": 1540,
      "train_LM": 0.51416015625
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.776131028315139e-05,
      "loss": 1.036,
      "step": 1550,
      "train_LM": 0.50091552734375
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.770927274623975e-05,
      "loss": 1.0462,
      "step": 1560,
      "train_LM": 0.557861328125
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.765666635926857e-05,
      "loss": 1.04,
      "step": 1570,
      "train_LM": 0.51849365234375
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.760349243998793e-05,
      "loss": 1.0457,
      "step": 1580,
      "train_LM": 0.51068115234375
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.754975232036418e-05,
      "loss": 1.048,
      "step": 1590,
      "train_LM": 0.53448486328125
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.749544734654657e-05,
      "loss": 1.0346,
      "step": 1600,
      "train_LM": 0.52642822265625
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.74405788788335e-05,
      "loss": 1.0532,
      "step": 1610,
      "train_LM": 0.53570556640625
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.738514829163848e-05,
      "loss": 1.0328,
      "step": 1620,
      "train_LM": 0.514404296875
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.732915697345567e-05,
      "loss": 1.0386,
      "step": 1630,
      "train_LM": 0.5050048828125
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.7272606326825144e-05,
      "loss": 1.0348,
      "step": 1640,
      "train_LM": 0.51141357421875
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.72154977682977e-05,
      "loss": 1.0445,
      "step": 1650,
      "train_LM": 0.53424072265625
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.715783272839943e-05,
      "loss": 1.0307,
      "step": 1660,
      "train_LM": 0.5203857421875
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.709961265159583e-05,
      "loss": 1.0361,
      "step": 1670,
      "train_LM": 0.51849365234375
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.704083899625568e-05,
      "loss": 1.0421,
      "step": 1680,
      "train_LM": 0.52880859375
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.698151323461448e-05,
      "loss": 1.0436,
      "step": 1690,
      "train_LM": 0.50469970703125
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.692163685273756e-05,
      "loss": 1.0332,
      "step": 1700,
      "train_LM": 0.51904296875
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.686121135048287e-05,
      "loss": 1.0395,
      "step": 1710,
      "train_LM": 0.51953125
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.6800238241463415e-05,
      "loss": 1.0391,
      "step": 1720,
      "train_LM": 0.5281982421875
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.673871905300935e-05,
      "loss": 1.0299,
      "step": 1730,
      "train_LM": 0.5076904296875
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.667665532612967e-05,
      "loss": 1.0351,
      "step": 1740,
      "train_LM": 0.49090576171875
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.661404861547368e-05,
      "loss": 1.0322,
      "step": 1750,
      "train_LM": 0.5260009765625
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.6550900489291985e-05,
      "loss": 1.0387,
      "step": 1760,
      "train_LM": 0.50994873046875
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.648721252939727e-05,
      "loss": 1.0296,
      "step": 1770,
      "train_LM": 0.50262451171875
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.642298633112462e-05,
      "loss": 1.0317,
      "step": 1780,
      "train_LM": 0.50982666015625
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.635822350329159e-05,
      "loss": 1.0305,
      "step": 1790,
      "train_LM": 0.52764892578125
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.629292566815791e-05,
      "loss": 1.0336,
      "step": 1800,
      "train_LM": 0.51715087890625
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.622709446138482e-05,
      "loss": 1.0357,
      "step": 1810,
      "train_LM": 0.4981689453125
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.616073153199413e-05,
      "loss": 1.0312,
      "step": 1820,
      "train_LM": 0.5146484375
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.609383854232689e-05,
      "loss": 1.0282,
      "step": 1830,
      "train_LM": 0.4881591796875
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.602641716800176e-05,
      "loss": 1.0285,
      "step": 1840,
      "train_LM": 0.5303955078125
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.595846909787302e-05,
      "loss": 1.036,
      "step": 1850,
      "train_LM": 0.5048828125
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.5889996033988314e-05,
      "loss": 1.0336,
      "step": 1860,
      "train_LM": 0.5347900390625
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.582099969154596e-05,
      "loss": 1.0266,
      "step": 1870,
      "train_LM": 0.51702880859375
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.5751481798852e-05,
      "loss": 1.0417,
      "step": 1880,
      "train_LM": 0.53302001953125
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.568144409727693e-05,
      "loss": 1.0201,
      "step": 1890,
      "train_LM": 0.53863525390625
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.561088834121205e-05,
      "loss": 1.0373,
      "step": 1900,
      "train_LM": 0.517333984375
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.553981629802555e-05,
      "loss": 1.0279,
      "step": 1910,
      "train_LM": 0.4990234375
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.54682297480182e-05,
      "loss": 1.0269,
      "step": 1920,
      "train_LM": 0.53338623046875
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.53961304843788e-05,
      "loss": 1.0265,
      "step": 1930,
      "train_LM": 0.49761962890625
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.532352031313922e-05,
      "loss": 1.023,
      "step": 1940,
      "train_LM": 0.50335693359375
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.5250401053129165e-05,
      "loss": 1.0333,
      "step": 1950,
      "train_LM": 0.5223388671875
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.5176774535930675e-05,
      "loss": 1.0349,
      "step": 1960,
      "train_LM": 0.4984130859375
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.5102642605832136e-05,
      "loss": 1.0307,
      "step": 1970,
      "train_LM": 0.504638671875
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.50280071197822e-05,
      "loss": 1.0163,
      "step": 1980,
      "train_LM": 0.5086669921875
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.495286994734317e-05,
      "loss": 1.0167,
      "step": 1990,
      "train_LM": 0.5035400390625
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.4877232970644245e-05,
      "loss": 1.0235,
      "step": 2000,
      "train_LM": 0.5247802734375
    }
  ],
  "max_steps": 6975,
  "num_train_epochs": 3,
  "total_flos": 0.0,
  "trial_name": null,
  "trial_params": null
}