| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.0, | |
| "eval_steps": 50, | |
| "global_step": 5846, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 1.0238907849829352e-07, | |
| "loss": 2.3356, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 1.0238907849829352e-06, | |
| "loss": 1.5291, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.0477815699658705e-06, | |
| "loss": 1.5769, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 3.0716723549488053e-06, | |
| "loss": 1.703, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4.095563139931741e-06, | |
| "loss": 1.7261, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 5.119453924914676e-06, | |
| "loss": 1.805, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "eval_loss": 1.3673903942108154, | |
| "eval_runtime": 191.75, | |
| "eval_samples_per_second": 3.39, | |
| "eval_steps_per_second": 1.695, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 6.1433447098976105e-06, | |
| "loss": 1.6482, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 7.167235494880547e-06, | |
| "loss": 1.7396, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 8.191126279863482e-06, | |
| "loss": 1.8731, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 9.215017064846417e-06, | |
| "loss": 1.5663, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 1.0238907849829352e-05, | |
| "loss": 1.352, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "eval_loss": 1.1858307123184204, | |
| "eval_runtime": 191.5669, | |
| "eval_samples_per_second": 3.393, | |
| "eval_steps_per_second": 1.697, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 1.1262798634812286e-05, | |
| "loss": 1.3743, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 1.2286689419795221e-05, | |
| "loss": 1.2731, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 1.3310580204778158e-05, | |
| "loss": 1.1203, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 1.4334470989761093e-05, | |
| "loss": 0.9248, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 1.5358361774744027e-05, | |
| "loss": 1.0793, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "eval_loss": 0.8566241264343262, | |
| "eval_runtime": 191.0031, | |
| "eval_samples_per_second": 3.403, | |
| "eval_steps_per_second": 1.702, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 1.6382252559726964e-05, | |
| "loss": 1.0165, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 1.7406143344709897e-05, | |
| "loss": 0.9651, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 1.8430034129692834e-05, | |
| "loss": 0.9725, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 1.9453924914675768e-05, | |
| "loss": 0.8568, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 2.0477815699658705e-05, | |
| "loss": 0.828, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "eval_loss": 0.7728402018547058, | |
| "eval_runtime": 191.5283, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 2.1501706484641638e-05, | |
| "loss": 0.9695, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 2.2525597269624572e-05, | |
| "loss": 0.8466, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 2.354948805460751e-05, | |
| "loss": 0.7734, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 2.4573378839590442e-05, | |
| "loss": 0.815, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 2.559726962457338e-05, | |
| "loss": 0.7571, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "eval_loss": 0.7384980916976929, | |
| "eval_runtime": 191.2905, | |
| "eval_samples_per_second": 3.398, | |
| "eval_steps_per_second": 1.699, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 2.6621160409556316e-05, | |
| "loss": 0.7339, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 2.7645051194539253e-05, | |
| "loss": 0.749, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 2.8668941979522186e-05, | |
| "loss": 0.7901, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 2.969283276450512e-05, | |
| "loss": 0.753, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 2.9999882374583493e-05, | |
| "loss": 0.5984, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "eval_loss": 0.7139731049537659, | |
| "eval_runtime": 191.4082, | |
| "eval_samples_per_second": 3.396, | |
| "eval_steps_per_second": 1.698, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 2.9999306254535426e-05, | |
| "loss": 0.7082, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 2.999825005360422e-05, | |
| "loss": 0.7059, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 2.9996713805595556e-05, | |
| "loss": 0.7265, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 2.9994697559679935e-05, | |
| "loss": 0.7542, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 2.9992201380391072e-05, | |
| "loss": 0.7967, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "eval_loss": 0.7131698727607727, | |
| "eval_runtime": 191.2353, | |
| "eval_samples_per_second": 3.399, | |
| "eval_steps_per_second": 1.699, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 2.998922534762387e-05, | |
| "loss": 0.7316, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 2.9985769556631827e-05, | |
| "loss": 0.764, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 2.9981834118024003e-05, | |
| "loss": 0.7398, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 2.9977419157761483e-05, | |
| "loss": 0.6904, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 2.9972524817153332e-05, | |
| "loss": 0.6366, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "eval_loss": 0.7042288184165955, | |
| "eval_runtime": 191.0789, | |
| "eval_samples_per_second": 3.402, | |
| "eval_steps_per_second": 1.701, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 2.9967151252852094e-05, | |
| "loss": 0.7075, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 2.9961298636848747e-05, | |
| "loss": 0.7198, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 2.9954967156467236e-05, | |
| "loss": 0.7407, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 2.9948157014358436e-05, | |
| "loss": 0.7417, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 2.9940868428493703e-05, | |
| "loss": 0.6902, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "eval_loss": 0.6833683252334595, | |
| "eval_runtime": 191.4439, | |
| "eval_samples_per_second": 3.395, | |
| "eval_steps_per_second": 1.698, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 2.993310163215788e-05, | |
| "loss": 0.7069, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 2.9924856873941814e-05, | |
| "loss": 0.7658, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 2.991613441773444e-05, | |
| "loss": 0.6817, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 2.99069345427143e-05, | |
| "loss": 0.6754, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 2.9897257543340622e-05, | |
| "loss": 0.7354, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "eval_loss": 0.6793980002403259, | |
| "eval_runtime": 191.1709, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 2.988710372934389e-05, | |
| "loss": 0.8299, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 2.9876473425715934e-05, | |
| "loss": 0.7134, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 2.986536697269953e-05, | |
| "loss": 0.6017, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 2.9853784725777496e-05, | |
| "loss": 0.7073, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 2.984172705566134e-05, | |
| "loss": 0.7073, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "eval_loss": 0.6876562833786011, | |
| "eval_runtime": 191.0978, | |
| "eval_samples_per_second": 3.401, | |
| "eval_steps_per_second": 1.701, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 2.982919434827937e-05, | |
| "loss": 0.6608, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 2.9816187004764356e-05, | |
| "loss": 0.6204, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 2.980270544144068e-05, | |
| "loss": 0.6901, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 2.9788750089811017e-05, | |
| "loss": 0.653, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 2.9774321396542538e-05, | |
| "loss": 0.7558, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "eval_loss": 0.6407818794250488, | |
| "eval_runtime": 191.1891, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 2.975941982345258e-05, | |
| "loss": 0.6821, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 2.9744045847493897e-05, | |
| "loss": 0.6245, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 2.9728199960739377e-05, | |
| "loss": 0.6667, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 2.9711882670366303e-05, | |
| "loss": 0.6947, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 2.9695094498640097e-05, | |
| "loss": 0.6622, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "eval_loss": 0.6254723072052002, | |
| "eval_runtime": 191.3077, | |
| "eval_samples_per_second": 3.398, | |
| "eval_steps_per_second": 1.699, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 2.9677835982897637e-05, | |
| "loss": 0.7466, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 2.9660107675530036e-05, | |
| "loss": 0.7269, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 2.964191014396497e-05, | |
| "loss": 0.7455, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 2.9623243970648508e-05, | |
| "loss": 0.6228, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 2.9604109753026484e-05, | |
| "loss": 0.7071, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "eval_loss": 0.626681387424469, | |
| "eval_runtime": 191.5654, | |
| "eval_samples_per_second": 3.393, | |
| "eval_steps_per_second": 1.697, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 2.9584508103525352e-05, | |
| "loss": 0.5913, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 2.9564439649532624e-05, | |
| "loss": 0.6259, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 2.9543905033376737e-05, | |
| "loss": 0.7175, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 2.9522904912306542e-05, | |
| "loss": 0.6687, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 2.9501439958470223e-05, | |
| "loss": 0.6281, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "eval_loss": 0.625874400138855, | |
| "eval_runtime": 191.1903, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 2.9479510858893833e-05, | |
| "loss": 0.6417, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 2.9457118315459263e-05, | |
| "loss": 0.6923, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 2.9434263044881795e-05, | |
| "loss": 0.6108, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 2.9410945778687166e-05, | |
| "loss": 0.5824, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 2.9387167263188134e-05, | |
| "loss": 0.6623, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "eval_loss": 0.6201896667480469, | |
| "eval_runtime": 191.3674, | |
| "eval_samples_per_second": 3.397, | |
| "eval_steps_per_second": 1.698, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 2.9362928259460615e-05, | |
| "loss": 0.6888, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 2.933822954331932e-05, | |
| "loss": 0.6624, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 2.9313071905292896e-05, | |
| "loss": 0.6936, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 2.9287456150598667e-05, | |
| "loss": 0.7167, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 2.9261383099116818e-05, | |
| "loss": 0.6423, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "eval_loss": 0.614385724067688, | |
| "eval_runtime": 191.5007, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 2.923485358536419e-05, | |
| "loss": 0.6628, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 2.9207868458467548e-05, | |
| "loss": 0.634, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 2.9180428582136406e-05, | |
| "loss": 0.6485, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 2.9152534834635386e-05, | |
| "loss": 0.705, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 2.9124188108756115e-05, | |
| "loss": 0.6506, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "eval_loss": 0.6195511817932129, | |
| "eval_runtime": 191.218, | |
| "eval_samples_per_second": 3.399, | |
| "eval_steps_per_second": 1.7, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 2.9095389311788626e-05, | |
| "loss": 0.6596, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 2.906613936549235e-05, | |
| "loss": 0.6726, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 2.9036439206066584e-05, | |
| "loss": 0.6191, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 2.9006289784120544e-05, | |
| "loss": 0.616, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 2.897569206464294e-05, | |
| "loss": 0.6141, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "eval_loss": 0.6134810447692871, | |
| "eval_runtime": 191.3238, | |
| "eval_samples_per_second": 3.397, | |
| "eval_steps_per_second": 1.699, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 2.8944647026971073e-05, | |
| "loss": 0.6493, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 2.891315566475951e-05, | |
| "loss": 0.6052, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 2.8881218985948265e-05, | |
| "loss": 0.5535, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 2.884883801273055e-05, | |
| "loss": 0.6467, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 2.8816013781520037e-05, | |
| "loss": 0.5386, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "eval_loss": 0.6097267270088196, | |
| "eval_runtime": 191.587, | |
| "eval_samples_per_second": 3.393, | |
| "eval_steps_per_second": 1.696, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 2.8782747342917714e-05, | |
| "loss": 0.6729, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 2.8749039761678246e-05, | |
| "loss": 0.7065, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 2.871489211667588e-05, | |
| "loss": 0.6251, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 2.868030550086995e-05, | |
| "loss": 0.6966, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 2.8645281021269837e-05, | |
| "loss": 0.661, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "eval_loss": 0.6025794148445129, | |
| "eval_runtime": 191.627, | |
| "eval_samples_per_second": 3.392, | |
| "eval_steps_per_second": 1.696, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 2.8609819798899616e-05, | |
| "loss": 0.7061, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 2.85739229687621e-05, | |
| "loss": 0.7411, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 2.8537591679802556e-05, | |
| "loss": 0.6016, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 2.850082709487192e-05, | |
| "loss": 0.5989, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 2.846363039068957e-05, | |
| "loss": 0.628, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "eval_loss": 0.5951941609382629, | |
| "eval_runtime": 191.6233, | |
| "eval_samples_per_second": 3.392, | |
| "eval_steps_per_second": 1.696, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 2.8426002757805684e-05, | |
| "loss": 0.6333, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 2.8387945400563107e-05, | |
| "loss": 0.65, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 2.8349459537058816e-05, | |
| "loss": 0.6119, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 2.8310546399104942e-05, | |
| "loss": 0.7187, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 2.8271207232189326e-05, | |
| "loss": 0.6502, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "eval_loss": 0.5898197293281555, | |
| "eval_runtime": 191.6127, | |
| "eval_samples_per_second": 3.392, | |
| "eval_steps_per_second": 1.696, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 2.823144329543568e-05, | |
| "loss": 0.6302, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 2.8191255861563248e-05, | |
| "loss": 0.6748, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 2.8150646216846107e-05, | |
| "loss": 0.6182, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 2.8109615661071973e-05, | |
| "loss": 0.6406, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 2.806816550750062e-05, | |
| "loss": 0.5601, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "eval_loss": 0.5858436822891235, | |
| "eval_runtime": 191.6692, | |
| "eval_samples_per_second": 3.391, | |
| "eval_steps_per_second": 1.696, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 2.802629708282183e-05, | |
| "loss": 0.5522, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 2.798401172711293e-05, | |
| "loss": 0.6217, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 2.7941310793795914e-05, | |
| "loss": 0.6141, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 2.7898195649594104e-05, | |
| "loss": 0.5273, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 2.785466767448843e-05, | |
| "loss": 0.6326, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "eval_loss": 0.5748882293701172, | |
| "eval_runtime": 191.4961, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 2.7810728261673242e-05, | |
| "loss": 0.557, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 2.776637881751172e-05, | |
| "loss": 0.6178, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 2.7721620761490877e-05, | |
| "loss": 0.6444, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 2.7676455526176096e-05, | |
| "loss": 0.6409, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 2.7630884557165313e-05, | |
| "loss": 0.6376, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "eval_loss": 0.5655392408370972, | |
| "eval_runtime": 191.9239, | |
| "eval_samples_per_second": 3.387, | |
| "eval_steps_per_second": 1.693, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.7584909313042722e-05, | |
| "loss": 0.6067, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.7538531265332104e-05, | |
| "loss": 0.6398, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.7491751898449713e-05, | |
| "loss": 0.5683, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.7444572709656788e-05, | |
| "loss": 0.5864, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.739699520901161e-05, | |
| "loss": 0.6145, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "eval_loss": 0.561920702457428, | |
| "eval_runtime": 191.8344, | |
| "eval_samples_per_second": 3.388, | |
| "eval_steps_per_second": 1.694, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.7349020919321168e-05, | |
| "loss": 0.5887, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.730065137609244e-05, | |
| "loss": 0.5826, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.725188812748324e-05, | |
| "loss": 0.5703, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7202732734252635e-05, | |
| "loss": 0.631, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7153186769711038e-05, | |
| "loss": 0.5654, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "eval_loss": 0.5552613735198975, | |
| "eval_runtime": 191.2617, | |
| "eval_samples_per_second": 3.398, | |
| "eval_steps_per_second": 1.699, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7103251819669823e-05, | |
| "loss": 0.5822, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.705292948239056e-05, | |
| "loss": 0.5752, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7002221368533893e-05, | |
| "loss": 0.6402, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6951129101107947e-05, | |
| "loss": 0.6384, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6899654315416425e-05, | |
| "loss": 0.5373, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "eval_loss": 0.5542144179344177, | |
| "eval_runtime": 191.2324, | |
| "eval_samples_per_second": 3.399, | |
| "eval_steps_per_second": 1.7, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6847798659006224e-05, | |
| "loss": 0.5887, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6795563791614726e-05, | |
| "loss": 0.5892, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.6742951385116673e-05, | |
| "loss": 0.5462, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.6689963123470657e-05, | |
| "loss": 0.6332, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.663660070266521e-05, | |
| "loss": 0.5696, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "eval_loss": 0.5435717105865479, | |
| "eval_runtime": 191.826, | |
| "eval_samples_per_second": 3.388, | |
| "eval_steps_per_second": 1.694, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.658286583066453e-05, | |
| "loss": 0.5227, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.6528760227353817e-05, | |
| "loss": 0.576, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.6474285624484226e-05, | |
| "loss": 0.6195, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.6419443765617415e-05, | |
| "loss": 0.5881, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.6364236406069776e-05, | |
| "loss": 0.5909, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "eval_loss": 0.5479687452316284, | |
| "eval_runtime": 191.7652, | |
| "eval_samples_per_second": 3.39, | |
| "eval_steps_per_second": 1.695, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.6308665312856222e-05, | |
| "loss": 0.5775, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.6252732264633656e-05, | |
| "loss": 0.6295, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.619643905164402e-05, | |
| "loss": 0.6261, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.6139787475657007e-05, | |
| "loss": 0.5329, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.6082779349912383e-05, | |
| "loss": 0.5193, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "eval_loss": 0.5510675311088562, | |
| "eval_runtime": 191.4274, | |
| "eval_samples_per_second": 3.396, | |
| "eval_steps_per_second": 1.698, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.6025416499061964e-05, | |
| "loss": 0.4693, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.596770075911121e-05, | |
| "loss": 0.5381, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.5909633977360442e-05, | |
| "loss": 0.6297, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.5851218012345742e-05, | |
| "loss": 0.6495, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.5792454733779463e-05, | |
| "loss": 0.6237, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "eval_loss": 0.5495699644088745, | |
| "eval_runtime": 191.6017, | |
| "eval_samples_per_second": 3.392, | |
| "eval_steps_per_second": 1.696, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.5733346022490364e-05, | |
| "loss": 0.4827, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.5673893770363434e-05, | |
| "loss": 0.6501, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.561409988027933e-05, | |
| "loss": 0.6398, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.5553966266053462e-05, | |
| "loss": 0.5673, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.5493494852374763e-05, | |
| "loss": 0.6133, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "eval_loss": 0.5484058260917664, | |
| "eval_runtime": 191.1432, | |
| "eval_samples_per_second": 3.401, | |
| "eval_steps_per_second": 1.7, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 2.5432687574744058e-05, | |
| "loss": 0.4929, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 2.537154637941213e-05, | |
| "loss": 0.5658, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 2.5310073223317427e-05, | |
| "loss": 0.6269, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 2.5248270074023417e-05, | |
| "loss": 0.6152, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 2.5186138909655618e-05, | |
| "loss": 0.5312, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "eval_loss": 0.5488571524620056, | |
| "eval_runtime": 191.5157, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 2.5123681718838288e-05, | |
| "loss": 0.5492, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 2.5060900500630776e-05, | |
| "loss": 0.56, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 2.4997797264463522e-05, | |
| "loss": 0.5767, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 2.4934374030073768e-05, | |
| "loss": 0.6002, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 2.487063282744089e-05, | |
| "loss": 0.5502, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "eval_loss": 0.5474597811698914, | |
| "eval_runtime": 191.4818, | |
| "eval_samples_per_second": 3.395, | |
| "eval_steps_per_second": 1.697, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 2.4806575696721434e-05, | |
| "loss": 0.5837, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 2.474220468818382e-05, | |
| "loss": 0.5393, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 2.4677521862142707e-05, | |
| "loss": 0.5341, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 2.4612529288893063e-05, | |
| "loss": 0.516, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 2.45472290486439e-05, | |
| "loss": 0.5906, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "eval_loss": 0.5362663269042969, | |
| "eval_runtime": 191.3463, | |
| "eval_samples_per_second": 3.397, | |
| "eval_steps_per_second": 1.698, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 2.4481623231451674e-05, | |
| "loss": 0.5574, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 2.4415713937153424e-05, | |
| "loss": 0.5718, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 2.4349503275299517e-05, | |
| "loss": 0.5709, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 2.4282993365086178e-05, | |
| "loss": 0.591, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 2.4216186335287616e-05, | |
| "loss": 0.5848, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "eval_loss": 0.5400258898735046, | |
| "eval_runtime": 191.4572, | |
| "eval_samples_per_second": 3.395, | |
| "eval_steps_per_second": 1.698, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 2.4149084324187924e-05, | |
| "loss": 0.4966, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 2.4081689479512616e-05, | |
| "loss": 0.4878, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 2.4014003958359886e-05, | |
| "loss": 0.6636, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 2.3946029927131588e-05, | |
| "loss": 0.5448, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 2.387776956146387e-05, | |
| "loss": 0.6144, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "eval_loss": 0.5446411371231079, | |
| "eval_runtime": 191.8065, | |
| "eval_samples_per_second": 3.389, | |
| "eval_steps_per_second": 1.694, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 2.380922504615756e-05, | |
| "loss": 0.5205, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 2.3740398575108214e-05, | |
| "loss": 0.562, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 2.3671292351235926e-05, | |
| "loss": 0.5474, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 2.360190858641478e-05, | |
| "loss": 0.5526, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 2.35322495014021e-05, | |
| "loss": 0.5733, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "eval_loss": 0.5437196493148804, | |
| "eval_runtime": 191.4879, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 2.3462317325767344e-05, | |
| "loss": 0.5515, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 2.3392114297820736e-05, | |
| "loss": 0.562, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 2.332164266454164e-05, | |
| "loss": 0.6135, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 2.3250904681506645e-05, | |
| "loss": 0.5578, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 2.317990261281735e-05, | |
| "loss": 0.5974, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "eval_loss": 0.542205810546875, | |
| "eval_runtime": 191.7959, | |
| "eval_samples_per_second": 3.389, | |
| "eval_steps_per_second": 1.695, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 2.310863873102792e-05, | |
| "loss": 0.5385, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 2.3037115317072335e-05, | |
| "loss": 0.5564, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 2.296533466019139e-05, | |
| "loss": 0.5899, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 2.2893299057859408e-05, | |
| "loss": 0.5247, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 2.282101081571073e-05, | |
| "loss": 0.5307, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "eval_loss": 0.5450670719146729, | |
| "eval_runtime": 191.4809, | |
| "eval_samples_per_second": 3.395, | |
| "eval_steps_per_second": 1.697, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 2.2748472247465925e-05, | |
| "loss": 0.5412, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.2675685674857688e-05, | |
| "loss": 0.5335, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.2602653427556564e-05, | |
| "loss": 0.5196, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.252937784309639e-05, | |
| "loss": 0.6248, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 2.2455861266799454e-05, | |
| "loss": 0.5956, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "eval_loss": 0.5410928726196289, | |
| "eval_runtime": 191.4099, | |
| "eval_samples_per_second": 3.396, | |
| "eval_steps_per_second": 1.698, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 2.238210605170144e-05, | |
| "loss": 0.457, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 2.230811455847612e-05, | |
| "loss": 0.523, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.223388915535978e-05, | |
| "loss": 0.5417, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.2159432218075436e-05, | |
| "loss": 0.5996, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.208474612975679e-05, | |
| "loss": 0.4696, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "eval_loss": 0.5418115258216858, | |
| "eval_runtime": 191.7248, | |
| "eval_samples_per_second": 3.39, | |
| "eval_steps_per_second": 1.695, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.2009833280871945e-05, | |
| "loss": 0.5112, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.1934696069146912e-05, | |
| "loss": 0.5181, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.185933689948884e-05, | |
| "loss": 0.5683, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.1783758183909076e-05, | |
| "loss": 0.5505, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.1707962341445922e-05, | |
| "loss": 0.5438, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "eval_loss": 0.5391676425933838, | |
| "eval_runtime": 191.586, | |
| "eval_samples_per_second": 3.393, | |
| "eval_steps_per_second": 1.696, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.163195179808725e-05, | |
| "loss": 0.5362, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.1555728986692838e-05, | |
| "loss": 0.6366, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.147929634691649e-05, | |
| "loss": 0.5995, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.1402656325127956e-05, | |
| "loss": 0.5061, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.1325811374334653e-05, | |
| "loss": 0.5265, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "eval_loss": 0.5308849811553955, | |
| "eval_runtime": 191.5189, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.124876395410312e-05, | |
| "loss": 0.5433, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.1171516530480312e-05, | |
| "loss": 0.5884, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.109407157591467e-05, | |
| "loss": 0.5428, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.1016431569176985e-05, | |
| "loss": 0.6354, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0938598995281054e-05, | |
| "loss": 0.5366, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "eval_loss": 0.536630392074585, | |
| "eval_runtime": 191.6398, | |
| "eval_samples_per_second": 3.392, | |
| "eval_steps_per_second": 1.696, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 2.086057634540415e-05, | |
| "loss": 0.5373, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 2.0782366116807273e-05, | |
| "loss": 0.5228, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 2.0703970812755247e-05, | |
| "loss": 0.5507, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 2.0625392942436574e-05, | |
| "loss": 0.5548, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 2.0546635020883126e-05, | |
| "loss": 0.5636, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "eval_loss": 0.5326777696609497, | |
| "eval_runtime": 191.4908, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 2.0467699568889666e-05, | |
| "loss": 0.5958, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 2.0388589112933137e-05, | |
| "loss": 0.4803, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 2.030930618509182e-05, | |
| "loss": 0.5546, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 2.022985332296428e-05, | |
| "loss": 0.5919, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 2.0150233069588147e-05, | |
| "loss": 0.5827, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "eval_loss": 0.5313092470169067, | |
| "eval_runtime": 191.6629, | |
| "eval_samples_per_second": 3.391, | |
| "eval_steps_per_second": 1.696, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 2.0070447973358715e-05, | |
| "loss": 0.4907, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.9990500587947393e-05, | |
| "loss": 0.5145, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.991039347221995e-05, | |
| "loss": 0.5026, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.9830129190154632e-05, | |
| "loss": 0.428, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.974971031076008e-05, | |
| "loss": 0.6114, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "eval_loss": 0.5310894250869751, | |
| "eval_runtime": 191.4913, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.9669139407993115e-05, | |
| "loss": 0.5429, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.9588419060676354e-05, | |
| "loss": 0.5071, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.9507551852415654e-05, | |
| "loss": 0.5473, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.942654037151745e-05, | |
| "loss": 0.6174, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.9345387210905882e-05, | |
| "loss": 0.5059, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "eval_loss": 0.5289625525474548, | |
| "eval_runtime": 191.2693, | |
| "eval_samples_per_second": 3.398, | |
| "eval_steps_per_second": 1.699, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.9264094968039814e-05, | |
| "loss": 0.5765, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.91826662448297e-05, | |
| "loss": 0.5516, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.91011036475543e-05, | |
| "loss": 0.5634, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.9019409786777274e-05, | |
| "loss": 0.5611, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.8937587277263605e-05, | |
| "loss": 0.49, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "eval_loss": 0.532410204410553, | |
| "eval_runtime": 190.9087, | |
| "eval_samples_per_second": 3.405, | |
| "eval_steps_per_second": 1.702, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.8855638737895923e-05, | |
| "loss": 0.6155, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.8773566791590687e-05, | |
| "loss": 0.5359, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.8691374065214213e-05, | |
| "loss": 0.6063, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.8609063189498617e-05, | |
| "loss": 0.6356, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.852663679895761e-05, | |
| "loss": 0.5073, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "eval_loss": 0.5306385159492493, | |
| "eval_runtime": 191.0927, | |
| "eval_samples_per_second": 3.401, | |
| "eval_steps_per_second": 1.701, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.8444097531802155e-05, | |
| "loss": 0.534, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.836144802985607e-05, | |
| "loss": 0.5671, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.8278690938471412e-05, | |
| "loss": 0.6688, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.8195828906443858e-05, | |
| "loss": 0.611, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.8112864585927918e-05, | |
| "loss": 0.4695, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "eval_loss": 0.5291544198989868, | |
| "eval_runtime": 191.1623, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.8029800632352004e-05, | |
| "loss": 0.5953, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.79466397043335e-05, | |
| "loss": 0.5353, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.786338446359362e-05, | |
| "loss": 0.5027, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.7780037574872243e-05, | |
| "loss": 0.5291, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.7696601705842613e-05, | |
| "loss": 0.5365, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "eval_loss": 0.5236905217170715, | |
| "eval_runtime": 191.3719, | |
| "eval_samples_per_second": 3.397, | |
| "eval_steps_per_second": 1.698, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.7613079527025958e-05, | |
| "loss": 0.5481, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.7529473711705998e-05, | |
| "loss": 0.5448, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.7445786935843413e-05, | |
| "loss": 0.62, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.7362021877990167e-05, | |
| "loss": 0.4821, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.7278181219203783e-05, | |
| "loss": 0.5738, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "eval_loss": 0.5252307057380676, | |
| "eval_runtime": 191.0362, | |
| "eval_samples_per_second": 3.402, | |
| "eval_steps_per_second": 1.701, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.7194267642961547e-05, | |
| "loss": 0.5271, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.711028383507459e-05, | |
| "loss": 0.4599, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.7026232483601952e-05, | |
| "loss": 0.4785, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.6942116278764522e-05, | |
| "loss": 0.5678, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6857937912858938e-05, | |
| "loss": 0.596, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "eval_loss": 0.5189000368118286, | |
| "eval_runtime": 191.2022, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6773700080171433e-05, | |
| "loss": 0.4796, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6689405476891577e-05, | |
| "loss": 0.536, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.6605056801025995e-05, | |
| "loss": 0.4664, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.6520656752311993e-05, | |
| "loss": 0.5301, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.6436208032131177e-05, | |
| "loss": 0.5542, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "eval_loss": 0.5165431499481201, | |
| "eval_runtime": 191.3721, | |
| "eval_samples_per_second": 3.397, | |
| "eval_steps_per_second": 1.698, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.635171334342297e-05, | |
| "loss": 0.4154, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.62671753905981e-05, | |
| "loss": 0.5872, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.618259687945204e-05, | |
| "loss": 0.5129, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.6097980517078428e-05, | |
| "loss": 0.5149, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.601332901178237e-05, | |
| "loss": 0.4898, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "eval_loss": 0.5182209610939026, | |
| "eval_runtime": 191.0314, | |
| "eval_samples_per_second": 3.403, | |
| "eval_steps_per_second": 1.701, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.5928645072993818e-05, | |
| "loss": 0.5592, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.58439314111808e-05, | |
| "loss": 0.5329, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.5759190737762694e-05, | |
| "loss": 0.4797, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.5674425765023426e-05, | |
| "loss": 0.5337, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.558963920602467e-05, | |
| "loss": 0.5882, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 0.5179625749588013, | |
| "eval_runtime": 190.5486, | |
| "eval_samples_per_second": 3.411, | |
| "eval_steps_per_second": 1.706, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.5504833774519017e-05, | |
| "loss": 0.6156, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.5420012184863102e-05, | |
| "loss": 0.5296, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.533517715193073e-05, | |
| "loss": 0.5465, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.5250331391025987e-05, | |
| "loss": 0.5149, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.5165477617796322e-05, | |
| "loss": 0.5004, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "eval_loss": 0.517549455165863, | |
| "eval_runtime": 190.8109, | |
| "eval_samples_per_second": 3.407, | |
| "eval_steps_per_second": 1.703, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.5080618548145639e-05, | |
| "loss": 0.4719, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.4995756898147363e-05, | |
| "loss": 0.5577, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.4910895383957512e-05, | |
| "loss": 0.4989, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.482603672172774e-05, | |
| "loss": 0.516, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.4741183627518442e-05, | |
| "loss": 0.4946, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "eval_loss": 0.5153396129608154, | |
| "eval_runtime": 190.4635, | |
| "eval_samples_per_second": 3.413, | |
| "eval_steps_per_second": 1.706, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.4656338817211783e-05, | |
| "loss": 0.5134, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.457150500642479e-05, | |
| "loss": 0.5038, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.4486684910422434e-05, | |
| "loss": 0.4284, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.440188124403071e-05, | |
| "loss": 0.4569, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.4317096721549756e-05, | |
| "loss": 0.5238, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "eval_loss": 0.5161213874816895, | |
| "eval_runtime": 190.5311, | |
| "eval_samples_per_second": 3.412, | |
| "eval_steps_per_second": 1.706, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.4232334056666983e-05, | |
| "loss": 0.5321, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.4147595962370186e-05, | |
| "loss": 0.5685, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.4062885150860765e-05, | |
| "loss": 0.5217, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.3978204333466846e-05, | |
| "loss": 0.5762, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.3893556220556566e-05, | |
| "loss": 0.5233, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "eval_loss": 0.5133797526359558, | |
| "eval_runtime": 190.6537, | |
| "eval_samples_per_second": 3.409, | |
| "eval_steps_per_second": 1.705, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.380894352145129e-05, | |
| "loss": 0.4908, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.3724368944338877e-05, | |
| "loss": 0.526, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.3639835196187049e-05, | |
| "loss": 0.4733, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.3555344982656692e-05, | |
| "loss": 0.5081, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.3470901008015313e-05, | |
| "loss": 0.5175, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "eval_loss": 0.5130189657211304, | |
| "eval_runtime": 190.879, | |
| "eval_samples_per_second": 3.405, | |
| "eval_steps_per_second": 1.703, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.3386505975050439e-05, | |
| "loss": 0.4941, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.3302162584983124e-05, | |
| "loss": 0.4777, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.3217873537381512e-05, | |
| "loss": 0.5527, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.3133641530074395e-05, | |
| "loss": 0.5204, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.3049469259064898e-05, | |
| "loss": 0.5392, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "eval_loss": 0.5122997760772705, | |
| "eval_runtime": 190.9457, | |
| "eval_samples_per_second": 3.404, | |
| "eval_steps_per_second": 1.702, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.2965359418444172e-05, | |
| "loss": 0.4622, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.2881314700305155e-05, | |
| "loss": 0.4548, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.2797337794656436e-05, | |
| "loss": 0.461, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.2713431389336117e-05, | |
| "loss": 0.4867, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.2629598169925822e-05, | |
| "loss": 0.5032, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "eval_loss": 0.5090053677558899, | |
| "eval_runtime": 190.5365, | |
| "eval_samples_per_second": 3.411, | |
| "eval_steps_per_second": 1.706, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.2545840819664724e-05, | |
| "loss": 0.5065, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.246216201936365e-05, | |
| "loss": 0.5793, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.2378564447319289e-05, | |
| "loss": 0.4568, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.2295050779228477e-05, | |
| "loss": 0.536, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.221162368810254e-05, | |
| "loss": 0.5184, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "eval_loss": 0.5121812224388123, | |
| "eval_runtime": 190.9677, | |
| "eval_samples_per_second": 3.404, | |
| "eval_steps_per_second": 1.702, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.212828584418175e-05, | |
| "loss": 0.5005, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.2045039914849853e-05, | |
| "loss": 0.4758, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.19618885645487e-05, | |
| "loss": 0.5513, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.187883445469295e-05, | |
| "loss": 0.4608, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.1795880243584922e-05, | |
| "loss": 0.5088, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "eval_loss": 0.5136343836784363, | |
| "eval_runtime": 190.6637, | |
| "eval_samples_per_second": 3.409, | |
| "eval_steps_per_second": 1.705, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.1713028586329463e-05, | |
| "loss": 0.5302, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.163028213474901e-05, | |
| "loss": 0.5131, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.1547643537298698e-05, | |
| "loss": 0.4384, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.1465115438981566e-05, | |
| "loss": 0.5021, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.1382700481263953e-05, | |
| "loss": 0.47, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "eval_loss": 0.5114705562591553, | |
| "eval_runtime": 191.1894, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.1300401301990885e-05, | |
| "loss": 0.4737, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.1218220535301717e-05, | |
| "loss": 0.5536, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.1136160811545764e-05, | |
| "loss": 0.5627, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.1054224757198145e-05, | |
| "loss": 0.4772, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.0972414994775712e-05, | |
| "loss": 0.5357, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "eval_loss": 0.5097524523735046, | |
| "eval_runtime": 190.9734, | |
| "eval_samples_per_second": 3.404, | |
| "eval_steps_per_second": 1.702, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.0890734142753085e-05, | |
| "loss": 0.5064, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.0809184815478894e-05, | |
| "loss": 0.4436, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.0727769623092067e-05, | |
| "loss": 0.5023, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.0646491171438273e-05, | |
| "loss": 0.5627, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.0565352061986567e-05, | |
| "loss": 0.4996, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "eval_loss": 0.509343147277832, | |
| "eval_runtime": 190.7855, | |
| "eval_samples_per_second": 3.407, | |
| "eval_steps_per_second": 1.703, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.0484354891746067e-05, | |
| "loss": 0.5062, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.0403502253182883e-05, | |
| "loss": 0.4636, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.0322796734137114e-05, | |
| "loss": 0.4952, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.0242240917740015e-05, | |
| "loss": 0.542, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.0161837382331329e-05, | |
| "loss": 0.5524, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "eval_loss": 0.5103505253791809, | |
| "eval_runtime": 190.7615, | |
| "eval_samples_per_second": 3.407, | |
| "eval_steps_per_second": 1.704, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.0081588701376768e-05, | |
| "loss": 0.4713, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.000149744338563e-05, | |
| "loss": 0.4149, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 9.921566171828606e-06, | |
| "loss": 0.511, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 9.841797445055705e-06, | |
| "loss": 0.4994, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 9.762193816214406e-06, | |
| "loss": 0.575, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "eval_loss": 0.5110099911689758, | |
| "eval_runtime": 190.6444, | |
| "eval_samples_per_second": 3.409, | |
| "eval_steps_per_second": 1.705, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 9.6827578331679e-06, | |
| "loss": 0.479, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 9.603492038413576e-06, | |
| "loss": 0.5531, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 9.524398969001617e-06, | |
| "loss": 0.4417, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 9.445481156453813e-06, | |
| "loss": 0.4834, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 9.36674112668254e-06, | |
| "loss": 0.4943, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "eval_loss": 0.5091186761856079, | |
| "eval_runtime": 191.3582, | |
| "eval_samples_per_second": 3.397, | |
| "eval_steps_per_second": 1.698, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 9.288181399909879e-06, | |
| "loss": 0.5398, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 9.209804490587e-06, | |
| "loss": 0.4653, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 9.13161290731364e-06, | |
| "loss": 0.5042, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 9.053609152757839e-06, | |
| "loss": 0.5113, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 8.97579572357583e-06, | |
| "loss": 0.4933, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "eval_loss": 0.5072566270828247, | |
| "eval_runtime": 190.9356, | |
| "eval_samples_per_second": 3.404, | |
| "eval_steps_per_second": 1.702, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 8.898175110332117e-06, | |
| "loss": 0.4885, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 8.820749797419775e-06, | |
| "loss": 0.5553, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 8.743522262980915e-06, | |
| "loss": 0.4913, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 8.666494978827402e-06, | |
| "loss": 0.4873, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 8.589670410361687e-06, | |
| "loss": 0.4441, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "eval_loss": 0.5085570216178894, | |
| "eval_runtime": 191.2682, | |
| "eval_samples_per_second": 3.398, | |
| "eval_steps_per_second": 1.699, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 8.51305101649795e-06, | |
| "loss": 0.4685, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 8.436639249583352e-06, | |
| "loss": 0.5195, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 8.360437555319584e-06, | |
| "loss": 0.6368, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 8.284448372684552e-06, | |
| "loss": 0.5241, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 8.208674133854349e-06, | |
| "loss": 0.5472, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "eval_loss": 0.5064599514007568, | |
| "eval_runtime": 191.2657, | |
| "eval_samples_per_second": 3.398, | |
| "eval_steps_per_second": 1.699, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 8.133117264125364e-06, | |
| "loss": 0.6318, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 8.057780181836706e-06, | |
| "loss": 0.5765, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 7.98266529829275e-06, | |
| "loss": 0.5019, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 7.90777501768601e-06, | |
| "loss": 0.4847, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 7.83311173702015e-06, | |
| "loss": 0.5409, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "eval_loss": 0.508000373840332, | |
| "eval_runtime": 190.7648, | |
| "eval_samples_per_second": 3.407, | |
| "eval_steps_per_second": 1.704, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 7.758677846033267e-06, | |
| "loss": 0.4938, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 7.684475727121431e-06, | |
| "loss": 0.5051, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 7.6105077552623935e-06, | |
| "loss": 0.5624, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 7.536776297939602e-06, | |
| "loss": 0.5206, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 7.4632837150664214e-06, | |
| "loss": 0.495, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "eval_loss": 0.5080721378326416, | |
| "eval_runtime": 190.9059, | |
| "eval_samples_per_second": 3.405, | |
| "eval_steps_per_second": 1.702, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 7.390032358910568e-06, | |
| "loss": 0.4812, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 7.317024574018875e-06, | |
| "loss": 0.4985, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 7.244262697142191e-06, | |
| "loss": 0.4753, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 7.171749057160641e-06, | |
| "loss": 0.4756, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 7.099485975009062e-06, | |
| "loss": 0.4447, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "eval_loss": 0.508942723274231, | |
| "eval_runtime": 190.432, | |
| "eval_samples_per_second": 3.413, | |
| "eval_steps_per_second": 1.707, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 7.0274757636027e-06, | |
| "loss": 0.5108, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 6.955720727763222e-06, | |
| "loss": 0.5241, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.884223164144902e-06, | |
| "loss": 0.4555, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.812985361161156e-06, | |
| "loss": 0.4658, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.742009598911249e-06, | |
| "loss": 0.4664, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "eval_loss": 0.509616494178772, | |
| "eval_runtime": 191.0031, | |
| "eval_samples_per_second": 3.403, | |
| "eval_steps_per_second": 1.702, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 6.6712981491073664e-06, | |
| "loss": 0.5579, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 6.600853275001867e-06, | |
| "loss": 0.4358, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 6.530677231314851e-06, | |
| "loss": 0.5503, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 6.460772264162e-06, | |
| "loss": 0.5437, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 6.3911406109826965e-06, | |
| "loss": 0.4979, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "eval_loss": 0.5052404999732971, | |
| "eval_runtime": 190.5662, | |
| "eval_samples_per_second": 3.411, | |
| "eval_steps_per_second": 1.705, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 6.321784500468374e-06, | |
| "loss": 0.4925, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 6.252706152491232e-06, | |
| "loss": 0.447, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 6.183907778033137e-06, | |
| "loss": 0.5297, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 6.115391579114901e-06, | |
| "loss": 0.5119, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 6.047159748725758e-06, | |
| "loss": 0.5215, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "eval_loss": 0.5041984915733337, | |
| "eval_runtime": 191.1975, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 5.9792144707532084e-06, | |
| "loss": 0.4897, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 5.911557919913109e-06, | |
| "loss": 0.5036, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.844192261680048e-06, | |
| "loss": 0.4816, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.7771196522180685e-06, | |
| "loss": 0.5659, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.710342238311621e-06, | |
| "loss": 0.4436, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "eval_loss": 0.5044705867767334, | |
| "eval_runtime": 191.0774, | |
| "eval_samples_per_second": 3.402, | |
| "eval_steps_per_second": 1.701, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.643862157296886e-06, | |
| "loss": 0.5258, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.577681536993347e-06, | |
| "loss": 0.5132, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.511802495635669e-06, | |
| "loss": 0.5112, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 5.446227141805942e-06, | |
| "loss": 0.5517, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 5.380957574366146e-06, | |
| "loss": 0.5381, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "eval_loss": 0.5044350028038025, | |
| "eval_runtime": 190.9022, | |
| "eval_samples_per_second": 3.405, | |
| "eval_steps_per_second": 1.702, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 5.31599588239101e-06, | |
| "loss": 0.4824, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.251344145101133e-06, | |
| "loss": 0.5973, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.187004431796419e-06, | |
| "loss": 0.5148, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.122978801789878e-06, | |
| "loss": 0.5223, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 5.0592693043416774e-06, | |
| "loss": 0.4965, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "eval_loss": 0.5032292008399963, | |
| "eval_runtime": 191.5156, | |
| "eval_samples_per_second": 3.394, | |
| "eval_steps_per_second": 1.697, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 4.995877978593587e-06, | |
| "loss": 0.5134, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 4.932806853503693e-06, | |
| "loss": 0.5154, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 4.8700579477814465e-06, | |
| "loss": 0.4608, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 4.80763326982308e-06, | |
| "loss": 0.5015, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.745534817647303e-06, | |
| "loss": 0.4185, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "eval_loss": 0.5041232705116272, | |
| "eval_runtime": 191.3747, | |
| "eval_samples_per_second": 3.396, | |
| "eval_steps_per_second": 1.698, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.683764578831349e-06, | |
| "loss": 0.5206, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.62232453044739e-06, | |
| "loss": 0.4842, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.561216638999208e-06, | |
| "loss": 0.4638, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.50044286035931e-06, | |
| "loss": 0.4863, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.440005139706266e-06, | |
| "loss": 0.4481, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "eval_loss": 0.5021231174468994, | |
| "eval_runtime": 191.2503, | |
| "eval_samples_per_second": 3.399, | |
| "eval_steps_per_second": 1.699, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.37990541146251e-06, | |
| "loss": 0.5067, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.320145599232371e-06, | |
| "loss": 0.5252, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.260727615740546e-06, | |
| "loss": 0.4833, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.201653362770862e-06, | |
| "loss": 0.5131, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.142924731105393e-06, | |
| "loss": 0.5035, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "eval_loss": 0.5021679401397705, | |
| "eval_runtime": 191.0641, | |
| "eval_samples_per_second": 3.402, | |
| "eval_steps_per_second": 1.701, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.084543600463976e-06, | |
| "loss": 0.4262, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.026511839444007e-06, | |
| "loss": 0.4519, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 3.9688313054606686e-06, | |
| "loss": 0.4888, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 3.911503844687463e-06, | |
| "loss": 0.4635, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 3.854531291997109e-06, | |
| "loss": 0.4038, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "eval_loss": 0.5035643577575684, | |
| "eval_runtime": 190.8106, | |
| "eval_samples_per_second": 3.407, | |
| "eval_steps_per_second": 1.703, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 3.797915470902848e-06, | |
| "loss": 0.5212, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 3.7416581935000387e-06, | |
| "loss": 0.4885, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 3.68576126040819e-06, | |
| "loss": 0.5139, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 3.63022646071332e-06, | |
| "loss": 0.4963, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 3.575055571910674e-06, | |
| "loss": 0.5086, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "eval_loss": 0.5035145282745361, | |
| "eval_runtime": 191.2098, | |
| "eval_samples_per_second": 3.399, | |
| "eval_steps_per_second": 1.7, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 3.5202503598478635e-06, | |
| "loss": 0.4843, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 3.4658125786683188e-06, | |
| "loss": 0.4477, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 3.4117439707551624e-06, | |
| "loss": 0.4563, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 3.3580462666754425e-06, | |
| "loss": 0.569, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 3.3047211851247184e-06, | |
| "loss": 0.512, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "eval_loss": 0.502407968044281, | |
| "eval_runtime": 191.0877, | |
| "eval_samples_per_second": 3.402, | |
| "eval_steps_per_second": 1.701, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 3.251770432872086e-06, | |
| "loss": 0.47, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 3.1991957047055135e-06, | |
| "loss": 0.4653, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 3.146998683377632e-06, | |
| "loss": 0.4625, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 3.0951810395518453e-06, | |
| "loss": 0.4331, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.043744431748868e-06, | |
| "loss": 0.4631, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "eval_loss": 0.5034356117248535, | |
| "eval_runtime": 190.5825, | |
| "eval_samples_per_second": 3.411, | |
| "eval_steps_per_second": 1.705, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 2.9926905062936572e-06, | |
| "loss": 0.5053, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 2.9420208972626876e-06, | |
| "loss": 0.5019, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 2.8917372264316865e-06, | |
| "loss": 0.5234, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 2.8418411032236868e-06, | |
| "loss": 0.4805, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 2.792334124657546e-06, | |
| "loss": 0.4992, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "eval_loss": 0.5018548369407654, | |
| "eval_runtime": 190.6677, | |
| "eval_samples_per_second": 3.409, | |
| "eval_steps_per_second": 1.705, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 2.7432178752968202e-06, | |
| "loss": 0.4904, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 2.694493927199036e-06, | |
| "loss": 0.4917, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 2.6461638398653915e-06, | |
| "loss": 0.5452, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 2.5982291601908288e-06, | |
| "loss": 0.5509, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 2.550691422414528e-06, | |
| "loss": 0.5503, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "eval_loss": 0.5017884969711304, | |
| "eval_runtime": 190.926, | |
| "eval_samples_per_second": 3.404, | |
| "eval_steps_per_second": 1.702, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 2.503552148070809e-06, | |
| "loss": 0.4887, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 2.4568128459404076e-06, | |
| "loss": 0.531, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 2.4104750120022167e-06, | |
| "loss": 0.4981, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 2.364540129385371e-06, | |
| "loss": 0.5331, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 2.319009668321808e-06, | |
| "loss": 0.4813, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "eval_loss": 0.5015760064125061, | |
| "eval_runtime": 190.7169, | |
| "eval_samples_per_second": 3.408, | |
| "eval_steps_per_second": 1.704, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 2.2738850860991946e-06, | |
| "loss": 0.5037, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 2.2291678270142717e-06, | |
| "loss": 0.4909, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 2.1848593223266588e-06, | |
| "loss": 0.4483, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 2.1409609902130102e-06, | |
| "loss": 0.5362, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 2.0974742357216497e-06, | |
| "loss": 0.554, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "eval_loss": 0.501855194568634, | |
| "eval_runtime": 191.1529, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 2.0544004507275864e-06, | |
| "loss": 0.5077, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 2.0117410138879588e-06, | |
| "loss": 0.5279, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.9694972905979304e-06, | |
| "loss": 0.5105, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.927670632946961e-06, | |
| "loss": 0.4754, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.886262379675557e-06, | |
| "loss": 0.4033, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "eval_loss": 0.5017901062965393, | |
| "eval_runtime": 191.1221, | |
| "eval_samples_per_second": 3.401, | |
| "eval_steps_per_second": 1.7, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.8452738561323978e-06, | |
| "loss": 0.4938, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.804706374231928e-06, | |
| "loss": 0.5114, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.764561232412376e-06, | |
| "loss": 0.4786, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.724839715594172e-06, | |
| "loss": 0.5134, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.6855430951388435e-06, | |
| "loss": 0.507, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "eval_loss": 0.501107931137085, | |
| "eval_runtime": 190.644, | |
| "eval_samples_per_second": 3.409, | |
| "eval_steps_per_second": 1.705, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.6466726288083151e-06, | |
| "loss": 0.5014, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.6082295607246405e-06, | |
| "loss": 0.5023, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.5702151213302062e-06, | |
| "loss": 0.4765, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.5326305273483226e-06, | |
| "loss": 0.493, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.4954769817442986e-06, | |
| "loss": 0.5168, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "eval_loss": 0.501087486743927, | |
| "eval_runtime": 190.6616, | |
| "eval_samples_per_second": 3.409, | |
| "eval_steps_per_second": 1.705, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.4587556736869363e-06, | |
| "loss": 0.5424, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.422467778510455e-06, | |
| "loss": 0.3973, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.3866144576768952e-06, | |
| "loss": 0.4886, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.3511968587389212e-06, | |
| "loss": 0.5631, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.3162161153031132e-06, | |
| "loss": 0.498, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "eval_loss": 0.501248300075531, | |
| "eval_runtime": 191.1768, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.2816733469936599e-06, | |
| "loss": 0.4845, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.247569659416546e-06, | |
| "loss": 0.4733, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.2139061441241551e-06, | |
| "loss": 0.5307, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.1806838785803253e-06, | |
| "loss": 0.4248, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.147903926125879e-06, | |
| "loss": 0.4781, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "eval_loss": 0.501189649105072, | |
| "eval_runtime": 191.222, | |
| "eval_samples_per_second": 3.399, | |
| "eval_steps_per_second": 1.7, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 1.115567335944569e-06, | |
| "loss": 0.4669, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 1.08367514302952e-06, | |
| "loss": 0.4994, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 1.052228368150086e-06, | |
| "loss": 0.433, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 1.021228017819177e-06, | |
| "loss": 0.5377, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 9.906750842610584e-07, | |
| "loss": 0.4656, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "eval_loss": 0.5004976987838745, | |
| "eval_runtime": 191.4015, | |
| "eval_samples_per_second": 3.396, | |
| "eval_steps_per_second": 1.698, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 9.605705453795772e-07, | |
| "loss": 0.5063, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 9.309153647268725e-07, | |
| "loss": 0.5088, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 9.017104914725294e-07, | |
| "loss": 0.4616, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 8.729568603732047e-07, | |
| "loss": 0.4933, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 8.446553917427053e-07, | |
| "loss": 0.5745, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "eval_loss": 0.500635027885437, | |
| "eval_runtime": 191.1355, | |
| "eval_samples_per_second": 3.401, | |
| "eval_steps_per_second": 1.7, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 8.168069914225229e-07, | |
| "loss": 0.5294, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 7.894125507528627e-07, | |
| "loss": 0.4279, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 7.624729465440939e-07, | |
| "loss": 0.489, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 7.359890410486908e-07, | |
| "loss": 0.5173, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 7.099616819336468e-07, | |
| "loss": 0.5095, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "eval_loss": 0.5005881786346436, | |
| "eval_runtime": 191.0033, | |
| "eval_samples_per_second": 3.403, | |
| "eval_steps_per_second": 1.702, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 6.843917022533191e-07, | |
| "loss": 0.5607, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 6.592799204227917e-07, | |
| "loss": 0.4391, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 6.346271401916681e-07, | |
| "loss": 0.5066, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 6.104341506183353e-07, | |
| "loss": 0.5036, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 5.867017260447305e-07, | |
| "loss": 0.4743, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "eval_loss": 0.5005041360855103, | |
| "eval_runtime": 190.9941, | |
| "eval_samples_per_second": 3.403, | |
| "eval_steps_per_second": 1.702, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 5.634306260715393e-07, | |
| "loss": 0.5054, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 5.406215955338939e-07, | |
| "loss": 0.51, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 5.182753644775279e-07, | |
| "loss": 0.4448, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 4.963926481354059e-07, | |
| "loss": 0.5601, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 4.749741469048446e-07, | |
| "loss": 0.5316, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "eval_loss": 0.5004457235336304, | |
| "eval_runtime": 190.9118, | |
| "eval_samples_per_second": 3.405, | |
| "eval_steps_per_second": 1.702, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 4.540205463250796e-07, | |
| "loss": 0.5286, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 4.3353251705533557e-07, | |
| "loss": 0.5409, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 4.135107148533507e-07, | |
| "loss": 0.4917, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 3.939557805543964e-07, | |
| "loss": 0.5177, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 3.7486834005076574e-07, | |
| "loss": 0.5014, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "eval_loss": 0.5002537965774536, | |
| "eval_runtime": 191.0993, | |
| "eval_samples_per_second": 3.401, | |
| "eval_steps_per_second": 1.701, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.5624900427172755e-07, | |
| "loss": 0.5148, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.3809836916399384e-07, | |
| "loss": 0.5025, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.204170156726233e-07, | |
| "loss": 0.5114, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 3.0320550972244953e-07, | |
| "loss": 0.4997, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 2.864644021999424e-07, | |
| "loss": 0.4273, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "eval_loss": 0.5002052783966064, | |
| "eval_runtime": 191.2235, | |
| "eval_samples_per_second": 3.399, | |
| "eval_steps_per_second": 1.7, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.701942289356019e-07, | |
| "loss": 0.4546, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.543955106867868e-07, | |
| "loss": 0.4597, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.390687531210584e-07, | |
| "loss": 0.5546, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.2421444679999125e-07, | |
| "loss": 0.4767, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.0983306716347628e-07, | |
| "loss": 0.4237, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "eval_loss": 0.5002537369728088, | |
| "eval_runtime": 191.0748, | |
| "eval_samples_per_second": 3.402, | |
| "eval_steps_per_second": 1.701, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 1.959250745144958e-07, | |
| "loss": 0.5814, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 1.8249091400439898e-07, | |
| "loss": 0.545, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 1.695310156186497e-07, | |
| "loss": 0.4825, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 1.570457941630593e-07, | |
| "loss": 0.5452, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 1.4503564925052558e-07, | |
| "loss": 0.47, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "eval_loss": 0.5003031492233276, | |
| "eval_runtime": 191.1598, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 1.3350096528821964e-07, | |
| "loss": 0.4873, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 1.2244211146530403e-07, | |
| "loss": 0.4904, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 1.118594417411023e-07, | |
| "loss": 0.5254, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 1.017532948337746e-07, | |
| "loss": 0.5274, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 9.212399420947815e-08, | |
| "loss": 0.4668, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "eval_loss": 0.5002000331878662, | |
| "eval_runtime": 191.2089, | |
| "eval_samples_per_second": 3.399, | |
| "eval_steps_per_second": 1.7, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 8.297184807200875e-08, | |
| "loss": 0.5126, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 7.429714935294374e-08, | |
| "loss": 0.4697, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 6.610017570225613e-08, | |
| "loss": 0.5164, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 5.838118947943671e-08, | |
| "loss": 0.4546, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 5.1140437745095805e-08, | |
| "loss": 0.523, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "eval_loss": 0.5001567006111145, | |
| "eval_runtime": 191.2027, | |
| "eval_samples_per_second": 3.4, | |
| "eval_steps_per_second": 1.7, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 4.437815225304953e-08, | |
| "loss": 0.4334, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 3.809454944290913e-08, | |
| "loss": 0.4872, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 3.228983043314648e-08, | |
| "loss": 0.5156, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 2.6964181014664245e-08, | |
| "loss": 0.5035, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 2.2117771644843966e-08, | |
| "loss": 0.479, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "eval_loss": 0.5002320408821106, | |
| "eval_runtime": 190.8457, | |
| "eval_samples_per_second": 3.406, | |
| "eval_steps_per_second": 1.703, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 1.775075744209209e-08, | |
| "loss": 0.5611, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 1.3863278180875626e-08, | |
| "loss": 0.5446, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 1.0455458287249031e-08, | |
| "loss": 0.5095, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 7.527406834869077e-09, | |
| "loss": 0.4258, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 5.079217541504311e-09, | |
| "loss": 0.4931, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "eval_loss": 0.500203013420105, | |
| "eval_runtime": 191.3658, | |
| "eval_samples_per_second": 3.397, | |
| "eval_steps_per_second": 1.698, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 3.110968766035782e-09, | |
| "loss": 0.5026, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 1.622723505954049e-09, | |
| "loss": 0.5178, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 6.145293953357944e-10, | |
| "loss": 0.5132, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 8.641870331838053e-11, | |
| "loss": 0.5834, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 5846, | |
| "total_flos": 2.2293554934684058e+17, | |
| "train_loss": 0.5801442559761337, | |
| "train_runtime": 31039.6997, | |
| "train_samples_per_second": 0.377, | |
| "train_steps_per_second": 0.188 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5846, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "total_flos": 2.2293554934684058e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
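
A minimal sketch (not part of the trainer state itself) of how this log could be consumed: assuming the JSON above is saved as `trainer_state.json` (the file name and the plotting library are assumptions here), the snippet below separates the per-step training entries (those with a `"loss"` key, logged every `logging_steps` = 10) from the periodic evaluation entries (those with `"eval_loss"`, logged every 50 steps) and plots both curves over `"step"`.

```python
# Illustrative sketch only; the file path and plotting choices are assumptions,
# while the keys ("log_history", "step", "loss", "eval_loss") match the log above.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # assumed save location of the JSON above
    state = json.load(f)

train_steps, train_loss, eval_steps, eval_loss = [], [], [], []
for entry in state["log_history"]:
    if "loss" in entry:        # training log entry, emitted every logging_steps (=10)
        train_steps.append(entry["step"])
        train_loss.append(entry["loss"])
    if "eval_loss" in entry:   # evaluation log entry, emitted every 50 steps
        eval_steps.append(entry["step"])
        eval_loss.append(entry["eval_loss"])

plt.plot(train_steps, train_loss, label="train loss")
plt.plot(eval_steps, eval_loss, label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.tight_layout()
plt.savefig("loss_curves.png")
```

Plotted this way, the `eval_loss` entries above show the evaluation loss flattening out around 0.50 across the second epoch.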