{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.877167630057803,
  "global_step": 27000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.18,
      "learning_rate": 6.020469596628538e-06,
      "loss": 3.6306,
      "step": 1000
    },
    {
      "epoch": 0.18,
      "eval_exact_match": 61.45695364238411,
      "eval_f1": 72.78282401033857,
      "step": 1000
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.869710596102849e-06,
      "loss": 1.6038,
      "step": 2000
    },
    {
      "epoch": 0.36,
      "eval_exact_match": 75.1844843897824,
      "eval_f1": 84.48829717580529,
      "step": 2000
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.485376071332488e-06,
      "loss": 1.2887,
      "step": 3000
    },
    {
      "epoch": 0.54,
      "eval_exact_match": 78.53358561967833,
      "eval_f1": 87.09051972422425,
      "step": 3000
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.101041546562128e-06,
      "loss": 1.1686,
      "step": 4000
    },
    {
      "epoch": 0.72,
      "eval_exact_match": 79.52696310312204,
      "eval_f1": 87.93479628909573,
      "step": 4000
    },
    {
      "epoch": 0.9,
      "learning_rate": 8.716707021791769e-06,
      "loss": 1.1108,
      "step": 5000
    },
    {
      "epoch": 0.9,
      "eval_exact_match": 80.53926206244087,
      "eval_f1": 88.73699523092536,
      "step": 5000
    },
    {
      "epoch": 1.08,
      "learning_rate": 8.332372497021408e-06,
      "loss": 1.0369,
      "step": 6000
    },
    {
      "epoch": 1.08,
      "eval_exact_match": 81.81646168401136,
      "eval_f1": 89.39106999844736,
      "step": 6000
    },
    {
      "epoch": 1.26,
      "learning_rate": 7.948037972251048e-06,
      "loss": 0.958,
      "step": 7000
    },
    {
      "epoch": 1.26,
      "eval_exact_match": 81.91106906338695,
      "eval_f1": 89.56880578196049,
      "step": 7000
    },
    {
      "epoch": 1.45,
      "learning_rate": 7.563703447480688e-06,
      "loss": 0.9526,
      "step": 8000
    },
    {
      "epoch": 1.45,
      "eval_exact_match": 82.65846736045411,
      "eval_f1": 90.03671875268634,
      "step": 8000
    },
    {
      "epoch": 1.63,
      "learning_rate": 7.179368922710328e-06,
      "loss": 0.9191,
      "step": 9000
    },
    {
      "epoch": 1.63,
      "eval_exact_match": 82.99905392620624,
      "eval_f1": 90.20622146176154,
      "step": 9000
    },
    {
      "epoch": 1.81,
      "learning_rate": 6.795034397939968e-06,
      "loss": 0.9354,
      "step": 10000
    },
    {
      "epoch": 1.81,
      "eval_exact_match": 82.79091769157995,
      "eval_f1": 90.11036500429192,
      "step": 10000
    },
    {
      "epoch": 1.99,
      "learning_rate": 6.410699873169607e-06,
      "loss": 0.899,
      "step": 11000
    },
    {
      "epoch": 1.99,
      "eval_exact_match": 83.07473982970671,
      "eval_f1": 90.31558728723542,
      "step": 11000
    },
    {
      "epoch": 2.17,
      "learning_rate": 6.026365348399247e-06,
      "loss": 0.8149,
      "step": 12000
    },
    {
      "epoch": 2.17,
      "eval_exact_match": 84.03973509933775,
      "eval_f1": 90.85216746584881,
      "step": 12000
    },
    {
      "epoch": 2.35,
      "learning_rate": 5.6420308236288864e-06,
      "loss": 0.8048,
      "step": 13000
    },
    {
      "epoch": 2.35,
      "eval_exact_match": 83.92620624408704,
      "eval_f1": 90.85115666556608,
      "step": 13000
    },
    {
      "epoch": 2.53,
      "learning_rate": 5.257696298858527e-06,
      "loss": 0.8114,
      "step": 14000
    },
    {
      "epoch": 2.53,
      "eval_exact_match": 83.65184484389782,
      "eval_f1": 90.78896967774679,
      "step": 14000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.873361774088167e-06,
      "loss": 0.7993,
      "step": 15000
    },
    {
      "epoch": 2.71,
      "eval_exact_match": 84.19110690633869,
      "eval_f1": 91.08154074192161,
      "step": 15000
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.4890272493178065e-06,
      "loss": 0.7827,
      "step": 16000
    },
    {
      "epoch": 2.89,
      "eval_exact_match": 84.19110690633869,
      "eval_f1": 91.13367427096787,
      "step": 16000
    },
    {
      "epoch": 3.07,
      "learning_rate": 4.104692724547447e-06,
      "loss": 0.7532,
      "step": 17000
    },
    {
      "epoch": 3.07,
      "eval_exact_match": 84.21948912015137,
      "eval_f1": 91.1856347835388,
      "step": 17000
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.720358199777086e-06,
      "loss": 0.7193,
      "step": 18000
    },
    {
      "epoch": 3.25,
      "eval_exact_match": 84.26679280983917,
      "eval_f1": 91.23320546520823,
      "step": 18000
    },
    {
      "epoch": 3.43,
      "learning_rate": 3.3360236750067258e-06,
      "loss": 0.7207,
      "step": 19000
    },
    {
      "epoch": 3.43,
      "eval_exact_match": 84.50331125827815,
      "eval_f1": 91.35916978909299,
      "step": 19000
    },
    {
      "epoch": 3.61,
      "learning_rate": 2.951689150236366e-06,
      "loss": 0.7191,
      "step": 20000
    },
    {
      "epoch": 3.61,
      "eval_exact_match": 84.41816461684012,
      "eval_f1": 91.31523088316432,
      "step": 20000
    },
    {
      "epoch": 3.79,
      "learning_rate": 2.567354625466006e-06,
      "loss": 0.7171,
      "step": 21000
    },
    {
      "epoch": 3.79,
      "eval_exact_match": 84.50331125827815,
      "eval_f1": 91.36423189060977,
      "step": 21000
    },
    {
      "epoch": 3.97,
      "learning_rate": 2.1830201006956455e-06,
      "loss": 0.705,
      "step": 22000
    },
    {
      "epoch": 3.97,
      "eval_exact_match": 84.76821192052981,
      "eval_f1": 91.50448097761131,
      "step": 22000
    },
    {
      "epoch": 4.15,
      "learning_rate": 1.7986855759252856e-06,
      "loss": 0.6757,
      "step": 23000
    },
    {
      "epoch": 4.15,
      "eval_exact_match": 84.33301797540209,
      "eval_f1": 91.37392029075976,
      "step": 23000
    },
    {
      "epoch": 4.34,
      "learning_rate": 1.4143510511549255e-06,
      "loss": 0.6607,
      "step": 24000
    },
    {
      "epoch": 4.34,
      "eval_exact_match": 84.59791863765373,
      "eval_f1": 91.43676774222646,
      "step": 24000
    },
    {
      "epoch": 4.52,
      "learning_rate": 1.0300165263845651e-06,
      "loss": 0.6783,
      "step": 25000
    },
    {
      "epoch": 4.52,
      "eval_exact_match": 84.64522232734153,
      "eval_f1": 91.49154084773687,
      "step": 25000
    },
    {
      "epoch": 4.7,
      "learning_rate": 6.45682001614205e-07,
      "loss": 0.6592,
      "step": 26000
    },
    {
      "epoch": 4.7,
      "eval_exact_match": 84.82497634815516,
      "eval_f1": 91.6291225955011,
      "step": 26000
    },
    {
      "epoch": 4.88,
      "learning_rate": 2.613474768438449e-07,
      "loss": 0.671,
      "step": 27000
    },
    {
      "epoch": 4.88,
      "eval_exact_match": 84.78713339640493,
      "eval_f1": 91.63400716675376,
      "step": 27000
    }
  ],
  "max_steps": 27680,
  "num_train_epochs": 5,
  "total_flos": 9.877152009845146e+16,
  "trial_name": null,
  "trial_params": null
}