{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 122720,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.08, "learning_rate": 1.9456757931334205e-05, "loss": 0.8613, "step": 500 },
    { "epoch": 0.16, "learning_rate": 1.8913515862668405e-05, "loss": 0.7137, "step": 1000 },
    { "epoch": 0.24, "learning_rate": 1.837027379400261e-05, "loss": 0.664, "step": 1500 },
    { "epoch": 0.33, "learning_rate": 1.7827031725336812e-05, "loss": 0.6249, "step": 2000 },
    { "epoch": 0.41, "learning_rate": 1.7283789656671015e-05, "loss": 0.6086, "step": 2500 },
    { "epoch": 0.49, "learning_rate": 1.6740547588005215e-05, "loss": 0.5861, "step": 3000 },
    { "epoch": 0.57, "learning_rate": 1.619730551933942e-05, "loss": 0.5828, "step": 3500 },
    { "epoch": 0.65, "learning_rate": 1.5654063450673622e-05, "loss": 0.5786, "step": 4000 },
    { "epoch": 0.73, "learning_rate": 1.5110821382007822e-05, "loss": 0.5533, "step": 4500 },
    { "epoch": 0.81, "learning_rate": 1.4567579313342026e-05, "loss": 0.582, "step": 5000 },
    { "epoch": 0.9, "learning_rate": 1.402433724467623e-05, "loss": 0.5415, "step": 5500 },
    { "epoch": 0.98, "learning_rate": 1.3481095176010431e-05, "loss": 0.5371, "step": 6000 },
    { "epoch": 0.26, "learning_rate": 1.8940677966101697e-05, "loss": 0.5397, "step": 6500 },
    { "epoch": 0.29, "learning_rate": 1.8859191655801828e-05, "loss": 0.5201, "step": 7000 },
    { "epoch": 0.31, "learning_rate": 1.8777705345501956e-05, "loss": 0.5129, "step": 7500 },
    { "epoch": 0.33, "learning_rate": 1.8696219035202087e-05, "loss": 0.5152, "step": 8000 },
    { "epoch": 0.35, "learning_rate": 1.861473272490222e-05, "loss": 0.5041, "step": 8500 },
    { "epoch": 0.37, "learning_rate": 1.853324641460235e-05, "loss": 0.5183, "step": 9000 },
    { "epoch": 0.39, "learning_rate": 1.8451760104302477e-05, "loss": 0.5183, "step": 9500 },
    { "epoch": 0.41, "learning_rate": 1.837027379400261e-05, "loss": 0.5126, "step": 10000 },
    { "epoch": 0.43, "learning_rate": 1.828878748370274e-05, "loss": 0.5179, "step": 10500 },
    { "epoch": 0.45, "learning_rate": 1.820730117340287e-05, "loss": 0.4975, "step": 11000 },
    { "epoch": 0.47, "learning_rate": 1.8125814863103e-05, "loss": 0.5099, "step": 11500 },
    { "epoch": 0.49, "learning_rate": 1.804432855280313e-05, "loss": 0.4978, "step": 12000 },
    { "epoch": 0.51, "learning_rate": 1.796284224250326e-05, "loss": 0.4882, "step": 12500 },
    { "epoch": 0.53, "learning_rate": 1.788135593220339e-05, "loss": 0.4891, "step": 13000 },
    { "epoch": 0.55, "learning_rate": 1.7799869621903524e-05, "loss": 0.4992, "step": 13500 },
    { "epoch": 0.57, "learning_rate": 1.771838331160365e-05, "loss": 0.4922, "step": 14000 },
    { "epoch": 0.59, "learning_rate": 1.7636897001303783e-05, "loss": 0.4901, "step": 14500 },
    { "epoch": 0.61, "learning_rate": 1.7555410691003914e-05, "loss": 0.4972, "step": 15000 },
    { "epoch": 0.63, "learning_rate": 1.7473924380704045e-05, "loss": 0.476, "step": 15500 },
    { "epoch": 0.65, "learning_rate": 1.7392438070404173e-05, "loss": 0.4918, "step": 16000 },
    { "epoch": 0.67, "learning_rate": 1.7310951760104304e-05, "loss": 0.4856, "step": 16500 },
    { "epoch": 0.69, "learning_rate": 1.7229465449804435e-05, "loss": 0.4855, "step": 17000 },
    { "epoch": 0.71, "learning_rate": 1.7147979139504566e-05, "loss": 0.485, "step": 17500 },
    { "epoch": 0.73, "learning_rate": 1.7066492829204694e-05, "loss": 0.4855, "step": 18000 },
    { "epoch": 0.75, "learning_rate": 1.6985006518904825e-05, "loss": 0.4817, "step": 18500 },
    { "epoch": 0.77, "learning_rate": 1.6903520208604957e-05, "loss": 0.5006, "step": 19000 },
    { "epoch": 0.79, "learning_rate": 1.6822033898305084e-05, "loss": 0.4814, "step": 19500 },
    { "epoch": 0.81, "learning_rate": 1.6740547588005215e-05, "loss": 0.4944, "step": 20000 },
    { "epoch": 0.84, "learning_rate": 1.6659061277705347e-05, "loss": 0.4805, "step": 20500 },
    { "epoch": 0.86, "learning_rate": 1.6577574967405478e-05, "loss": 0.4927, "step": 21000 },
    { "epoch": 0.88, "learning_rate": 1.6496088657105606e-05, "loss": 0.4699, "step": 21500 },
    { "epoch": 0.9, "learning_rate": 1.6414602346805737e-05, "loss": 0.4839, "step": 22000 },
    { "epoch": 0.92, "learning_rate": 1.6333116036505868e-05, "loss": 0.4795, "step": 22500 },
    { "epoch": 0.94, "learning_rate": 1.6251629726206e-05, "loss": 0.4888, "step": 23000 },
    { "epoch": 0.96, "learning_rate": 1.617014341590613e-05, "loss": 0.4744, "step": 23500 },
    { "epoch": 0.98, "learning_rate": 1.608865710560626e-05, "loss": 0.473, "step": 24000 },
    { "epoch": 1.0, "learning_rate": 1.600717079530639e-05, "loss": 0.4739, "step": 24500 },
    { "epoch": 1.02, "learning_rate": 1.592568448500652e-05, "loss": 0.3848, "step": 25000 },
    { "epoch": 1.04, "learning_rate": 1.5844198174706652e-05, "loss": 0.3872, "step": 25500 },
    { "epoch": 1.06, "learning_rate": 1.576271186440678e-05, "loss": 0.3878, "step": 26000 },
    { "epoch": 1.08, "learning_rate": 1.568122555410691e-05, "loss": 0.3813, "step": 26500 },
    { "epoch": 1.1, "learning_rate": 1.5599739243807042e-05, "loss": 0.3873, "step": 27000 },
    { "epoch": 1.12, "learning_rate": 1.5518252933507173e-05, "loss": 0.3923, "step": 27500 },
    { "epoch": 1.14, "learning_rate": 1.54367666232073e-05, "loss": 0.3901, "step": 28000 },
    { "epoch": 1.16, "learning_rate": 1.5355280312907432e-05, "loss": 0.3889, "step": 28500 },
    { "epoch": 1.18, "learning_rate": 1.5273794002607563e-05, "loss": 0.38, "step": 29000 },
    { "epoch": 1.2, "learning_rate": 1.5192307692307693e-05, "loss": 0.388, "step": 29500 },
    { "epoch": 1.22, "learning_rate": 1.5110821382007822e-05, "loss": 0.3993, "step": 30000 },
    { "epoch": 1.24, "learning_rate": 1.5029335071707954e-05, "loss": 0.3911, "step": 30500 },
    { "epoch": 1.26, "learning_rate": 1.4947848761408083e-05, "loss": 0.393, "step": 31000 },
    { "epoch": 1.28, "learning_rate": 1.4866362451108216e-05, "loss": 0.3804, "step": 31500 },
    { "epoch": 1.3, "learning_rate": 1.4784876140808346e-05, "loss": 0.3983, "step": 32000 },
    { "epoch": 1.32, "learning_rate": 1.4703389830508477e-05, "loss": 0.3978, "step": 32500 },
    { "epoch": 1.34, "learning_rate": 1.4621903520208606e-05, "loss": 0.4031, "step": 33000 },
    { "epoch": 1.36, "learning_rate": 1.4540417209908737e-05, "loss": 0.3871, "step": 33500 },
    { "epoch": 1.39, "learning_rate": 1.4458930899608867e-05, "loss": 0.4003, "step": 34000 },
    { "epoch": 1.41, "learning_rate": 1.4377444589308998e-05, "loss": 0.3789, "step": 34500 },
    { "epoch": 1.43, "learning_rate": 1.4295958279009128e-05, "loss": 0.3932, "step": 35000 },
    { "epoch": 1.45, "learning_rate": 1.4214471968709259e-05, "loss": 0.3867, "step": 35500 },
    { "epoch": 1.47, "learning_rate": 1.4132985658409388e-05, "loss": 0.391, "step": 36000 },
    { "epoch": 1.49, "learning_rate": 1.4051499348109518e-05, "loss": 0.3852, "step": 36500 },
    { "epoch": 1.51, "learning_rate": 1.3970013037809649e-05, "loss": 0.3865, "step": 37000 },
    { "epoch": 1.53, "learning_rate": 1.3888526727509778e-05, "loss": 0.3932, "step": 37500 },
    { "epoch": 1.55, "learning_rate": 1.380704041720991e-05, "loss": 0.3957, "step": 38000 },
    { "epoch": 1.57, "learning_rate": 1.372555410691004e-05, "loss": 0.3908, "step": 38500 },
    { "epoch": 1.59, "learning_rate": 1.364406779661017e-05, "loss": 0.3984, "step": 39000 },
    { "epoch": 1.61, "learning_rate": 1.35625814863103e-05, "loss": 0.394, "step": 39500 },
    { "epoch": 1.63, "learning_rate": 1.3481095176010431e-05, "loss": 0.3937, "step": 40000 },
    { "epoch": 1.65, "learning_rate": 1.3399608865710562e-05, "loss": 0.4022, "step": 40500 },
    { "epoch": 1.67, "learning_rate": 1.3318122555410693e-05, "loss": 0.3872, "step": 41000 },
    { "epoch": 1.69, "learning_rate": 1.3236636245110823e-05, "loss": 0.3815, "step": 41500 },
    { "epoch": 1.71, "learning_rate": 1.3155149934810954e-05, "loss": 0.3853, "step": 42000 },
    { "epoch": 1.73, "learning_rate": 1.3073663624511084e-05, "loss": 0.3913, "step": 42500 },
    { "epoch": 1.75, "learning_rate": 1.2992177314211213e-05, "loss": 0.3913, "step": 43000 },
    { "epoch": 1.77, "learning_rate": 1.2910691003911344e-05, "loss": 0.3931, "step": 43500 },
    { "epoch": 1.79, "learning_rate": 1.2829204693611474e-05, "loss": 0.3929, "step": 44000 },
    { "epoch": 1.81, "learning_rate": 1.2747718383311605e-05, "loss": 0.3772, "step": 44500 },
    { "epoch": 1.83, "learning_rate": 1.2666232073011735e-05, "loss": 0.3892, "step": 45000 },
    { "epoch": 1.85, "learning_rate": 1.2584745762711866e-05, "loss": 0.3939, "step": 45500 },
    { "epoch": 1.87, "learning_rate": 1.2503259452411995e-05, "loss": 0.3979, "step": 46000 },
    { "epoch": 1.89, "learning_rate": 1.2421773142112126e-05, "loss": 0.3779, "step": 46500 },
    { "epoch": 1.91, "learning_rate": 1.2340286831812256e-05, "loss": 0.3921, "step": 47000 },
    { "epoch": 1.94, "learning_rate": 1.2258800521512385e-05, "loss": 0.3848, "step": 47500 },
    { "epoch": 1.96, "learning_rate": 1.2177314211212517e-05, "loss": 0.3868, "step": 48000 },
    { "epoch": 1.98, "learning_rate": 1.2095827900912646e-05, "loss": 0.3853, "step": 48500 },
    { "epoch": 2.0, "learning_rate": 1.2014341590612777e-05, "loss": 0.3865, "step": 49000 },
    { "epoch": 2.02, "learning_rate": 1.193285528031291e-05, "loss": 0.2994, "step": 49500 },
    { "epoch": 2.04, "learning_rate": 1.185136897001304e-05, "loss": 0.2864, "step": 50000 },
    { "epoch": 2.06, "learning_rate": 1.176988265971317e-05, "loss": 0.2874, "step": 50500 },
    { "epoch": 2.08, "learning_rate": 1.16883963494133e-05, "loss": 0.2832, "step": 51000 },
    { "epoch": 2.1, "learning_rate": 1.160691003911343e-05, "loss": 0.27, "step": 51500 },
    { "epoch": 2.12, "learning_rate": 1.1525423728813561e-05, "loss": 0.2907, "step": 52000 },
    { "epoch": 2.14, "learning_rate": 1.144393741851369e-05, "loss": 0.2772, "step": 52500 },
    { "epoch": 2.16, "learning_rate": 1.1362451108213822e-05, "loss": 0.2823, "step": 53000 },
    { "epoch": 2.18, "learning_rate": 1.1280964797913951e-05, "loss": 0.2755, "step": 53500 },
    { "epoch": 2.2, "learning_rate": 1.1199478487614082e-05, "loss": 0.2722, "step": 54000 },
    { "epoch": 2.22, "learning_rate": 1.1117992177314212e-05, "loss": 0.2835, "step": 54500 },
    { "epoch": 2.24, "learning_rate": 1.1036505867014341e-05, "loss": 0.2825, "step": 55000 },
    { "epoch": 2.26, "learning_rate": 1.0955019556714473e-05, "loss": 0.2781, "step": 55500 },
    { "epoch": 2.28, "learning_rate": 1.0873533246414602e-05, "loss": 0.2866, "step": 56000 },
    { "epoch": 2.3, "learning_rate": 1.0792046936114733e-05, "loss": 0.2839, "step": 56500 },
    { "epoch": 2.32, "learning_rate": 1.0710560625814863e-05, "loss": 0.2915, "step": 57000 },
    { "epoch": 2.34, "learning_rate": 1.0629074315514994e-05, "loss": 0.2876, "step": 57500 },
    { "epoch": 2.36, "learning_rate": 1.0547588005215125e-05, "loss": 0.2768, "step": 58000 },
    { "epoch": 2.38, "learning_rate": 1.0466101694915256e-05, "loss": 0.2825, "step": 58500 },
    { "epoch": 2.4, "learning_rate": 1.0384615384615386e-05, "loss": 0.264, "step": 59000 },
    { "epoch": 2.42, "learning_rate": 1.0303129074315517e-05, "loss": 0.2975, "step": 59500 },
    { "epoch": 2.44, "learning_rate": 1.0221642764015647e-05, "loss": 0.2829, "step": 60000 },
    { "epoch": 2.46, "learning_rate": 1.0140156453715778e-05, "loss": 0.2979, "step": 60500 },
    { "epoch": 2.49, "learning_rate": 1.0058670143415907e-05, "loss": 0.2833, "step": 61000 },
    { "epoch": 2.51, "learning_rate": 9.977183833116037e-06, "loss": 0.2856, "step": 61500 },
    { "epoch": 2.53, "learning_rate": 9.895697522816168e-06, "loss": 0.291, "step": 62000 },
    { "epoch": 2.55, "learning_rate": 9.814211212516298e-06, "loss": 0.2841, "step": 62500 },
    { "epoch": 2.57, "learning_rate": 9.732724902216429e-06, "loss": 0.2806, "step": 63000 },
    { "epoch": 2.59, "learning_rate": 9.651238591916558e-06, "loss": 0.2952, "step": 63500 },
    { "epoch": 2.61, "learning_rate": 9.56975228161669e-06, "loss": 0.2876, "step": 64000 },
    { "epoch": 2.63, "learning_rate": 9.488265971316819e-06, "loss": 0.285, "step": 64500 },
    { "epoch": 2.65, "learning_rate": 9.40677966101695e-06, "loss": 0.28, "step": 65000 },
    { "epoch": 2.67, "learning_rate": 9.325293350717081e-06, "loss": 0.2823, "step": 65500 },
    { "epoch": 2.69, "learning_rate": 9.24380704041721e-06, "loss": 0.2918, "step": 66000 },
    { "epoch": 2.71, "learning_rate": 9.162320730117342e-06, "loss": 0.2919, "step": 66500 },
    { "epoch": 2.73, "learning_rate": 9.080834419817471e-06, "loss": 0.2888, "step": 67000 },
    { "epoch": 2.75, "learning_rate": 8.999348109517601e-06, "loss": 0.2677, "step": 67500 },
    { "epoch": 2.77, "learning_rate": 8.917861799217732e-06, "loss": 0.2879, "step": 68000 },
    { "epoch": 2.79, "learning_rate": 8.836375488917862e-06, "loss": 0.2856, "step": 68500 },
    { "epoch": 2.81, "learning_rate": 8.754889178617993e-06, "loss": 0.2923, "step": 69000 },
    { "epoch": 2.83, "learning_rate": 8.673402868318124e-06, "loss": 0.2949, "step": 69500 },
    { "epoch": 2.85, "learning_rate": 8.591916558018254e-06, "loss": 0.2826, "step": 70000 },
    { "epoch": 2.87, "learning_rate": 8.510430247718385e-06, "loss": 0.2716, "step": 70500 },
    { "epoch": 2.89, "learning_rate": 8.428943937418514e-06, "loss": 0.2858, "step": 71000 },
    { "epoch": 2.91, "learning_rate": 8.347457627118645e-06, "loss": 0.284, "step": 71500 },
    { "epoch": 2.93, "learning_rate": 8.265971316818775e-06, "loss": 0.2869, "step": 72000 },
    { "epoch": 2.95, "learning_rate": 8.184485006518904e-06, "loss": 0.2937, "step": 72500 },
    { "epoch": 2.97, "learning_rate": 8.102998696219036e-06, "loss": 0.2837, "step": 73000 },
    { "epoch": 2.99, "learning_rate": 8.021512385919165e-06, "loss": 0.2783, "step": 73500 },
    { "epoch": 3.01, "learning_rate": 7.940026075619296e-06, "loss": 0.2272, "step": 74000 },
    { "epoch": 3.04, "learning_rate": 7.858539765319428e-06, "loss": 0.1973, "step": 74500 },
    { "epoch": 3.06, "learning_rate": 7.777053455019557e-06, "loss": 0.2154, "step": 75000 },
    { "epoch": 3.08, "learning_rate": 7.695567144719688e-06, "loss": 0.1972, "step": 75500 },
    { "epoch": 3.1, "learning_rate": 7.614080834419818e-06, "loss": 0.213, "step": 76000 },
    { "epoch": 3.12, "learning_rate": 7.532594524119948e-06, "loss": 0.1933, "step": 76500 },
    { "epoch": 3.14, "learning_rate": 7.451108213820078e-06, "loss": 0.2167, "step": 77000 },
    { "epoch": 3.16, "learning_rate": 7.369621903520209e-06, "loss": 0.1957, "step": 77500 },
    { "epoch": 3.18, "learning_rate": 7.288135593220339e-06, "loss": 0.2187, "step": 78000 },
    { "epoch": 3.2, "learning_rate": 7.20664928292047e-06, "loss": 0.2148, "step": 78500 },
    { "epoch": 3.22, "learning_rate": 7.125162972620601e-06, "loss": 0.2187, "step": 79000 },
    { "epoch": 3.24, "learning_rate": 7.043676662320731e-06, "loss": 0.2128, "step": 79500 },
    { "epoch": 3.26, "learning_rate": 6.962190352020861e-06, "loss": 0.207, "step": 80000 },
    { "epoch": 3.28, "learning_rate": 6.880704041720992e-06, "loss": 0.2116, "step": 80500 },
    { "epoch": 3.3, "learning_rate": 6.799217731421122e-06, "loss": 0.2049, "step": 81000 },
    { "epoch": 3.32, "learning_rate": 6.7177314211212515e-06, "loss": 0.2031, "step": 81500 },
    { "epoch": 3.34, "learning_rate": 6.636245110821382e-06, "loss": 0.216, "step": 82000 },
    { "epoch": 3.36, "learning_rate": 6.554758800521513e-06, "loss": 0.2082, "step": 82500 },
    { "epoch": 3.38, "learning_rate": 6.473272490221643e-06, "loss": 0.2197, "step": 83000 },
    { "epoch": 3.4, "learning_rate": 6.391786179921774e-06, "loss": 0.2109, "step": 83500 },
    { "epoch": 3.42, "learning_rate": 6.310299869621904e-06, "loss": 0.2187, "step": 84000 },
    { "epoch": 3.44, "learning_rate": 6.2288135593220344e-06, "loss": 0.2206, "step": 84500 },
    { "epoch": 3.46, "learning_rate": 6.147327249022165e-06, "loss": 0.2038, "step": 85000 },
    { "epoch": 3.48, "learning_rate": 6.065840938722295e-06, "loss": 0.2197, "step": 85500 },
    { "epoch": 3.5, "learning_rate": 5.9843546284224255e-06, "loss": 0.2222, "step": 86000 },
    { "epoch": 3.52, "learning_rate": 5.902868318122556e-06, "loss": 0.2109, "step": 86500 },
    { "epoch": 3.54, "learning_rate": 5.821382007822687e-06, "loss": 0.2128, "step": 87000 },
    { "epoch": 3.57, "learning_rate": 5.739895697522817e-06, "loss": 0.236, "step": 87500 },
    { "epoch": 3.59, "learning_rate": 5.658409387222948e-06, "loss": 0.1958, "step": 88000 },
    { "epoch": 3.61, "learning_rate": 5.576923076923077e-06, "loss": 0.2258, "step": 88500 },
    { "epoch": 3.63, "learning_rate": 5.4954367666232076e-06, "loss": 0.2214, "step": 89000 },
    { "epoch": 3.65, "learning_rate": 5.413950456323338e-06, "loss": 0.2241, "step": 89500 },
    { "epoch": 3.67, "learning_rate": 5.332464146023468e-06, "loss": 0.2093, "step": 90000 },
    { "epoch": 3.69, "learning_rate": 5.250977835723599e-06, "loss": 0.2215, "step": 90500 },
    { "epoch": 3.71, "learning_rate": 5.169491525423729e-06, "loss": 0.2118, "step": 91000 },
    { "epoch": 3.73, "learning_rate": 5.08800521512386e-06, "loss": 0.2216, "step": 91500 },
    { "epoch": 3.75, "learning_rate": 5.0065189048239905e-06, "loss": 0.2036, "step": 92000 },
    { "epoch": 3.77, "learning_rate": 4.92503259452412e-06, "loss": 0.2186, "step": 92500 },
    { "epoch": 3.79, "learning_rate": 4.843546284224251e-06, "loss": 0.2201, "step": 93000 },
    { "epoch": 3.81, "learning_rate": 4.7620599739243815e-06, "loss": 0.2036, "step": 93500 },
    { "epoch": 3.83, "learning_rate": 4.680573663624511e-06, "loss": 0.2319, "step": 94000 },
    { "epoch": 3.85, "learning_rate": 4.599087353324641e-06, "loss": 0.2126, "step": 94500 },
    { "epoch": 3.87, "learning_rate": 4.5176010430247726e-06, "loss": 0.2246, "step": 95000 },
    { "epoch": 3.89, "learning_rate": 4.436114732724903e-06, "loss": 0.2269, "step": 95500 },
    { "epoch": 3.91, "learning_rate": 4.354628422425033e-06, "loss": 0.2074, "step": 96000 },
    { "epoch": 3.93, "learning_rate": 4.273142112125163e-06, "loss": 0.2219, "step": 96500 },
    { "epoch": 3.95, "learning_rate": 4.191655801825294e-06, "loss": 0.1998, "step": 97000 },
    { "epoch": 3.97, "learning_rate": 4.110169491525424e-06, "loss": 0.2124, "step": 97500 },
    { "epoch": 3.99, "learning_rate": 4.028683181225555e-06, "loss": 0.2225, "step": 98000 },
    { "epoch": 4.01, "learning_rate": 3.947196870925685e-06, "loss": 0.1845, "step": 98500 },
    { "epoch": 4.03, "learning_rate": 3.865710560625815e-06, "loss": 0.156, "step": 99000 },
    { "epoch": 4.05, "learning_rate": 3.7842242503259457e-06, "loss": 0.1695, "step": 99500 },
    { "epoch": 4.07, "learning_rate": 3.702737940026076e-06, "loss": 0.1729, "step": 100000 },
    { "epoch": 4.09, "learning_rate": 3.6212516297262064e-06, "loss": 0.159, "step": 100500 },
    { "epoch": 4.12, "learning_rate": 3.5397653194263363e-06, "loss": 0.1735, "step": 101000 },
    { "epoch": 4.14, "learning_rate": 3.4582790091264675e-06, "loss": 0.1683, "step": 101500 },
    { "epoch": 4.16, "learning_rate": 3.3767926988265974e-06, "loss": 0.1734, "step": 102000 },
    { "epoch": 4.18, "learning_rate": 3.2953063885267278e-06, "loss": 0.1575, "step": 102500 },
    { "epoch": 4.2, "learning_rate": 3.213820078226858e-06, "loss": 0.1643, "step": 103000 },
    { "epoch": 4.22, "learning_rate": 3.1323337679269885e-06, "loss": 0.1626, "step": 103500 },
    { "epoch": 4.24, "learning_rate": 3.0508474576271192e-06, "loss": 0.1631, "step": 104000 },
    { "epoch": 4.26, "learning_rate": 2.969361147327249e-06, "loss": 0.1731, "step": 104500 },
    { "epoch": 4.28, "learning_rate": 2.8878748370273795e-06, "loss": 0.1729, "step": 105000 },
    { "epoch": 4.3, "learning_rate": 2.80638852672751e-06, "loss": 0.1658, "step": 105500 },
    { "epoch": 4.32, "learning_rate": 2.7249022164276406e-06, "loss": 0.1641, "step": 106000 },
    { "epoch": 4.34, "learning_rate": 2.643415906127771e-06, "loss": 0.1716, "step": 106500 },
    { "epoch": 4.36, "learning_rate": 2.5619295958279013e-06, "loss": 0.1765, "step": 107000 },
    { "epoch": 4.38, "learning_rate": 2.4804432855280312e-06, "loss": 0.1713, "step": 107500 },
    { "epoch": 4.4, "learning_rate": 2.398956975228162e-06, "loss": 0.1716, "step": 108000 },
    { "epoch": 4.42, "learning_rate": 2.3174706649282924e-06, "loss": 0.1754, "step": 108500 },
    { "epoch": 4.44, "learning_rate": 2.2359843546284227e-06, "loss": 0.1551, "step": 109000 },
    { "epoch": 4.46, "learning_rate": 2.154498044328553e-06, "loss": 0.1704, "step": 109500 },
    { "epoch": 4.48, "learning_rate": 2.0730117340286834e-06, "loss": 0.1744, "step": 110000 },
    { "epoch": 4.5, "learning_rate": 1.9915254237288137e-06, "loss": 0.1608, "step": 110500 },
    { "epoch": 4.52, "learning_rate": 1.910039113428944e-06, "loss": 0.1731, "step": 111000 },
    { "epoch": 4.54, "learning_rate": 1.8285528031290744e-06, "loss": 0.1635, "step": 111500 },
    { "epoch": 4.56, "learning_rate": 1.7470664928292048e-06, "loss": 0.1539, "step": 112000 },
    { "epoch": 4.58, "learning_rate": 1.6655801825293353e-06, "loss": 0.1608, "step": 112500 },
    { "epoch": 4.6, "learning_rate": 1.5840938722294655e-06, "loss": 0.1732, "step": 113000 },
    { "epoch": 4.62, "learning_rate": 1.502607561929596e-06, "loss": 0.1554, "step": 113500 },
    { "epoch": 4.64, "learning_rate": 1.4211212516297262e-06, "loss": 0.1719, "step": 114000 },
    { "epoch": 4.67, "learning_rate": 1.3396349413298567e-06, "loss": 0.1605, "step": 114500 },
    { "epoch": 4.69, "learning_rate": 1.258148631029987e-06, "loss": 0.1698, "step": 115000 },
    { "epoch": 4.71, "learning_rate": 1.1766623207301174e-06, "loss": 0.1686, "step": 115500 },
    { "epoch": 4.73, "learning_rate": 1.0951760104302478e-06, "loss": 0.1651, "step": 116000 },
    { "epoch": 4.75, "learning_rate": 1.0136897001303781e-06, "loss": 0.1647, "step": 116500 },
    { "epoch": 4.77, "learning_rate": 9.322033898305086e-07, "loss": 0.1735, "step": 117000 },
    { "epoch": 4.79, "learning_rate": 8.507170795306389e-07, "loss": 0.1578, "step": 117500 },
    { "epoch": 4.81, "learning_rate": 7.692307692307694e-07, "loss": 0.1729, "step": 118000 },
    { "epoch": 4.83, "learning_rate": 6.877444589308997e-07, "loss": 0.1682, "step": 118500 },
    { "epoch": 4.85, "learning_rate": 6.0625814863103e-07, "loss": 0.1585, "step": 119000 },
    { "epoch": 4.87, "learning_rate": 5.247718383311604e-07, "loss": 0.1605, "step": 119500 },
    { "epoch": 4.89, "learning_rate": 4.432855280312908e-07, "loss": 0.1629, "step": 120000 },
    { "epoch": 4.91, "learning_rate": 3.6179921773142114e-07, "loss": 0.1504, "step": 120500 },
    { "epoch": 4.93, "learning_rate": 2.803129074315515e-07, "loss": 0.1706, "step": 121000 },
    { "epoch": 4.95, "learning_rate": 1.988265971316819e-07, "loss": 0.16, "step": 121500 },
    { "epoch": 4.97, "learning_rate": 1.1734028683181226e-07, "loss": 0.1641, "step": 122000 },
    { "epoch": 4.99, "learning_rate": 3.585397653194264e-08, "loss": 0.1709, "step": 122500 },
    { "epoch": 5.0, "step": 122720, "total_flos": 98499428530398720, "train_runtime": 14248.9446, "train_samples_per_second": 8.613 }
  ],
  "max_steps": 122720,
  "num_train_epochs": 5,
  "total_flos": 98499428530398720,
  "trial_name": null,
  "trial_params": null
}