{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 20373,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 4.882079900826349e-05,
      "loss": 1.4754,
      "step": 500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.539443723136697e-05,
      "loss": 1.3547,
      "step": 1000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.004414420573958e-05,
      "loss": 1.3184,
      "step": 1500
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.3274645598733375e-05,
      "loss": 1.3174,
      "step": 2000
    },
    {
      "epoch": 0.37,
      "learning_rate": 2.5724549368023616e-05,
      "loss": 1.2904,
      "step": 2500
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8106101990647016e-05,
      "loss": 1.2746,
      "step": 3000
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.1137997922835163e-05,
      "loss": 1.2553,
      "step": 3500
    },
    {
      "epoch": 0.59,
      "learning_rate": 5.477580782771013e-06,
      "loss": 1.2449,
      "step": 4000
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.6588321308710408e-06,
      "loss": 1.2664,
      "step": 4500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.1997742936272785e-08,
      "loss": 1.2583,
      "step": 5000
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.796034360648269e-07,
      "loss": 1.2692,
      "step": 5500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.802066381068883e-06,
      "loss": 1.2549,
      "step": 6000
    },
    {
      "epoch": 0.96,
      "learning_rate": 8.8242592737696e-06,
      "loss": 1.2621,
      "step": 6500
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5372408126985777e-05,
      "loss": 1.2511,
      "step": 7000
    },
    {
      "epoch": 1.1,
      "learning_rate": 2.282878625097739e-05,
      "loss": 1.2055,
      "step": 7500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0489988167458534e-05,
      "loss": 1.2219,
      "step": 8000
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.76332861246046e-05,
      "loss": 1.2093,
      "step": 8500
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.3584809399588655e-05,
      "loss": 1.2115,
      "step": 9000
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.778311462055443e-05,
      "loss": 1.2122,
      "step": 9500
    },
    {
      "epoch": 1.47,
      "learning_rate": 4.983215013308785e-05,
      "loss": 1.1913,
      "step": 10000
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.953861796051032e-05,
      "loss": 1.1973,
      "step": 10500
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.6930208777142614e-05,
      "loss": 1.2112,
      "step": 11000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.225298967865532e-05,
      "loss": 1.1787,
      "step": 11500
    },
    {
      "epoch": 1.77,
      "learning_rate": 3.59481911770088e-05,
      "loss": 1.2036,
      "step": 12000
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.8610583243870337e-05,
      "loss": 1.1818,
      "step": 12500
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.09323670433783e-05,
      "loss": 1.1674,
      "step": 13000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.363787538820378e-05,
      "loss": 1.1715,
      "step": 13500
    },
    {
      "epoch": 2.06,
      "learning_rate": 7.4152420218663585e-06,
      "loss": 1.122,
      "step": 14000
    },
    {
      "epoch": 2.14,
      "learning_rate": 2.8514857793098454e-06,
      "loss": 1.0968,
      "step": 14500
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.771335315155861e-07,
      "loss": 1.1392,
      "step": 15000
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.2560596844390935e-07,
      "loss": 1.1046,
      "step": 15500
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.4111976063067728e-06,
      "loss": 1.15,
      "step": 16000
    },
    {
      "epoch": 2.43,
      "learning_rate": 6.727728298952271e-06,
      "loss": 1.1218,
      "step": 16500
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.2767993464490152e-05,
      "loss": 1.1315,
      "step": 17000
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.9962178169036043e-05,
      "loss": 1.119,
      "step": 17500
    },
    {
      "epoch": 2.65,
      "learning_rate": 2.7631611233523096e-05,
      "loss": 1.1307,
      "step": 18000
    },
    {
      "epoch": 2.72,
      "learning_rate": 3.505278841189536e-05,
      "loss": 1.144,
      "step": 18500
    },
    {
      "epoch": 2.8,
      "learning_rate": 4.152562494506565e-05,
      "loss": 1.1165,
      "step": 19000
    },
    {
      "epoch": 2.87,
      "learning_rate": 4.643949881229301e-05,
      "loss": 1.1387,
      "step": 19500
    },
    {
      "epoch": 2.95,
      "learning_rate": 4.933085441857718e-05,
      "loss": 1.1241,
      "step": 20000
    },
    {
      "epoch": 3.0,
      "step": 20373,
      "total_flos": 5.063263464534835e+16,
      "train_loss": 1.2059516395565477,
      "train_runtime": 8228.1791,
      "train_samples_per_second": 39.613,
      "train_steps_per_second": 2.476
    }
  ],
  "max_steps": 20373,
  "num_train_epochs": 3,
  "total_flos": 5.063263464534835e+16,
  "trial_name": null,
  "trial_params": null
}