{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.9906191369606,
  "global_step": 1995,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 9.999380066679942e-05,
      "loss": 6.0782,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 9.997520420446694e-05,
      "loss": 5.3688,
      "step": 20
    },
    {
      "epoch": 0.08,
      "learning_rate": 9.99442152244292e-05,
      "loss": 4.9442,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.990084141112673e-05,
      "loss": 4.4859,
      "step": 40
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.984509352010838e-05,
      "loss": 4.3563,
      "step": 50
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.977698537536419e-05,
      "loss": 4.2417,
      "step": 60
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.969653386589748e-05,
      "loss": 4.0783,
      "step": 70
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.96037589415368e-05,
      "loss": 4.0693,
      "step": 80
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.949868360798893e-05,
      "loss": 4.0495,
      "step": 90
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.938133392113398e-05,
      "loss": 4.0433,
      "step": 100
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.925173898056435e-05,
      "loss": 3.9894,
      "step": 110
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.910993092236878e-05,
      "loss": 3.9666,
      "step": 120
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.895594491116336e-05,
      "loss": 3.9527,
      "step": 130
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.880697684843487e-05,
      "loss": 3.934,
      "step": 140
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.862996041285071e-05,
      "loss": 3.9631,
      "step": 150
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.844088504414131e-05,
      "loss": 3.9519,
      "step": 160
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.82397976279551e-05,
      "loss": 3.9718,
      "step": 170
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.80267480286079e-05,
      "loss": 3.9922,
      "step": 180
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.780178907671789e-05,
      "loss": 3.9988,
      "step": 190
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.756497655610504e-05,
      "loss": 4.0149,
      "step": 200
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.731636918995821e-05,
      "loss": 3.9659,
      "step": 210
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.705602862627335e-05,
      "loss": 3.9068,
      "step": 220
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.678401942256647e-05,
      "loss": 3.9793,
      "step": 230
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.650040902986503e-05,
      "loss": 3.8925,
      "step": 240
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.620526777598202e-05,
      "loss": 3.869,
      "step": 250
    },
    {
      "epoch": 0.65,
      "learning_rate": 9.589866884807635e-05,
      "loss": 3.8753,
      "step": 260
    },
    {
      "epoch": 0.68,
      "learning_rate": 9.55806882745046e-05,
      "loss": 3.9413,
      "step": 270
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.525140490596778e-05,
      "loss": 3.983,
      "step": 280
    },
    {
      "epoch": 0.73,
      "learning_rate": 9.491090039595869e-05,
      "loss": 3.8938,
      "step": 290
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.455925918051388e-05,
      "loss": 3.9515,
      "step": 300
    },
    {
      "epoch": 0.78,
      "learning_rate": 9.419656845727582e-05,
      "loss": 3.9323,
      "step": 310
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.386077375438848e-05,
      "loss": 3.8784,
      "step": 320
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.347733899113708e-05,
      "loss": 3.9068,
      "step": 330
    },
    {
      "epoch": 0.85,
      "learning_rate": 9.308312300744246e-05,
      "loss": 3.9492,
      "step": 340
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.267822355835402e-05,
      "loss": 3.9681,
      "step": 350
    },
    {
      "epoch": 0.9,
      "learning_rate": 9.226274104813568e-05,
      "loss": 3.8544,
      "step": 360
    },
    {
      "epoch": 0.93,
      "learning_rate": 9.183677850536822e-05,
      "loss": 3.8653,
      "step": 370
    },
    {
      "epoch": 0.95,
      "learning_rate": 9.140044155740101e-05,
      "loss": 3.9159,
      "step": 380
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.095383840415915e-05,
      "loss": 3.9288,
      "step": 390
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.049707979131288e-05,
      "loss": 3.926,
      "step": 400
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.003027898281552e-05,
      "loss": 3.9602,
      "step": 410
    },
    {
      "epoch": 1.05,
      "learning_rate": 8.955355173281708e-05,
      "loss": 3.9455,
      "step": 420
    },
    {
      "epoch": 1.08,
      "learning_rate": 8.906701625696028e-05,
      "loss": 3.9189,
      "step": 430
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.85707932030663e-05,
      "loss": 3.9093,
      "step": 440
    },
    {
      "epoch": 1.13,
      "learning_rate": 8.806500562121723e-05,
      "loss": 3.9124,
      "step": 450
    },
    {
      "epoch": 1.15,
      "learning_rate": 8.754977893324305e-05,
      "loss": 3.8797,
      "step": 460
    },
    {
      "epoch": 1.18,
      "learning_rate": 8.702524090162021e-05,
      "loss": 3.9157,
      "step": 470
    },
    {
      "epoch": 1.2,
      "learning_rate": 8.649152159779014e-05,
      "loss": 3.8361,
      "step": 480
    },
    {
      "epoch": 1.23,
      "learning_rate": 8.594875336990483e-05,
      "loss": 3.9486,
      "step": 490
    },
    {
      "epoch": 1.25,
      "learning_rate": 8.539707081000807e-05,
      "loss": 3.9365,
      "step": 500
    },
    {
      "epoch": 1.28,
      "learning_rate": 8.483661072066027e-05,
      "loss": 3.9262,
      "step": 510
    },
    {
      "epoch": 1.3,
      "learning_rate": 8.4267512081015e-05,
      "loss": 3.9852,
      "step": 520
    },
    {
      "epoch": 1.33,
      "learning_rate": 8.36899160123559e-05,
      "loss": 3.9534,
      "step": 530
    },
    {
      "epoch": 1.35,
      "learning_rate": 8.31039657431024e-05,
      "loss": 3.908,
      "step": 540
    },
    {
      "epoch": 1.38,
      "learning_rate": 8.250980657329278e-05,
      "loss": 3.9295,
      "step": 550
    },
    {
      "epoch": 1.4,
      "learning_rate": 8.190758583855378e-05,
      "loss": 3.9352,
      "step": 560
    },
    {
      "epoch": 1.43,
      "learning_rate": 8.135881792367686e-05,
      "loss": 3.8359,
      "step": 570
    },
    {
      "epoch": 1.45,
      "learning_rate": 8.074169325499913e-05,
      "loss": 3.9291,
      "step": 580
    },
    {
      "epoch": 1.48,
      "learning_rate": 8.01169454663359e-05,
      "loss": 3.8899,
      "step": 590
    },
    {
      "epoch": 1.5,
      "learning_rate": 7.948472947847546e-05,
      "loss": 3.8512,
      "step": 600
    },
    {
      "epoch": 1.53,
      "learning_rate": 7.884520206412035e-05,
      "loss": 3.9138,
      "step": 610
    },
    {
      "epoch": 1.55,
      "learning_rate": 7.81985218090119e-05,
      "loss": 3.9402,
      "step": 620
    },
    {
      "epoch": 1.58,
      "learning_rate": 7.754484907260513e-05,
      "loss": 3.9229,
      "step": 630
    },
    {
      "epoch": 1.6,
      "learning_rate": 7.688434594830392e-05,
      "loss": 3.9898,
      "step": 640
    },
    {
      "epoch": 1.63,
      "learning_rate": 7.628418849052523e-05,
      "loss": 3.9128,
      "step": 650
    },
    {
      "epoch": 1.65,
      "learning_rate": 7.561116023000997e-05,
      "loss": 3.8917,
      "step": 660
    },
    {
      "epoch": 1.68,
      "learning_rate": 7.493178108485792e-05,
      "loss": 3.8849,
      "step": 670
    },
    {
      "epoch": 1.7,
      "learning_rate": 7.424621952297668e-05,
      "loss": 3.9241,
      "step": 680
    },
    {
      "epoch": 1.73,
      "learning_rate": 7.362406862579298e-05,
      "loss": 3.9071,
      "step": 690
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.29272300579533e-05,
      "loss": 3.9416,
      "step": 700
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.229520942582965e-05,
      "loss": 3.9031,
      "step": 710
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.158771761692464e-05,
      "loss": 3.9428,
      "step": 720
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.087487262983776e-05,
      "loss": 3.9572,
      "step": 730
    },
    {
      "epoch": 1.85,
      "learning_rate": 7.015685123111276e-05,
      "loss": 3.8725,
      "step": 740
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.943383147090552e-05,
      "loss": 3.9596,
      "step": 750
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.870599263883219e-05,
      "loss": 3.8494,
      "step": 760
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.797351521951021e-05,
      "loss": 3.9234,
      "step": 770
    },
    {
      "epoch": 1.95,
      "learning_rate": 6.723658084780297e-05,
      "loss": 3.8579,
      "step": 780
    },
    {
      "epoch": 1.98,
      "learning_rate": 6.649537226377915e-05,
      "loss": 3.9918,
      "step": 790
    },
    {
      "epoch": 2.0,
      "learning_rate": 6.575007326739809e-05,
      "loss": 3.9396,
      "step": 800
    },
    {
      "epoch": 2.03,
      "learning_rate": 6.50008686729323e-05,
      "loss": 3.8686,
      "step": 810
    },
    {
      "epoch": 2.05,
      "learning_rate": 6.424794426313845e-05,
      "loss": 3.9506,
      "step": 820
    },
    {
      "epoch": 2.08,
      "learning_rate": 6.349148674318816e-05,
      "loss": 3.9296,
      "step": 830
    },
    {
      "epoch": 2.1,
      "learning_rate": 6.273168369437018e-05,
      "loss": 3.8906,
      "step": 840
    },
    {
      "epoch": 2.13,
      "learning_rate": 6.196872352757516e-05,
      "loss": 3.9072,
      "step": 850
    },
    {
      "epoch": 2.15,
      "learning_rate": 6.12027954365748e-05,
      "loss": 3.8331,
      "step": 860
    },
    {
      "epoch": 2.18,
      "learning_rate": 6.043408935110688e-05,
      "loss": 3.843,
      "step": 870
    },
    {
      "epoch": 2.2,
      "learning_rate": 5.9662795889777666e-05,
      "loss": 3.9493,
      "step": 880
    },
    {
      "epoch": 2.23,
      "learning_rate": 5.888910631279365e-05,
      "loss": 3.8179,
      "step": 890
    },
    {
      "epoch": 2.25,
      "learning_rate": 5.81132124745341e-05,
      "loss": 3.9128,
      "step": 900
    },
    {
      "epoch": 2.28,
      "learning_rate": 5.733530677597627e-05,
      "loss": 3.9505,
      "step": 910
    },
    {
      "epoch": 2.3,
      "learning_rate": 5.655558211698513e-05,
      "loss": 3.87,
      "step": 920
    },
    {
      "epoch": 2.33,
      "learning_rate": 5.577423184847932e-05,
      "loss": 3.9356,
      "step": 930
    },
    {
      "epoch": 2.35,
      "learning_rate": 5.506978684083671e-05,
      "loss": 3.9159,
      "step": 940
    },
    {
      "epoch": 2.38,
      "learning_rate": 5.428588200104875e-05,
      "loss": 3.8956,
      "step": 950
    },
    {
      "epoch": 2.4,
      "learning_rate": 5.350091437683746e-05,
      "loss": 3.9291,
      "step": 960
    },
    {
      "epoch": 2.43,
      "learning_rate": 5.2715078619237015e-05,
      "loss": 3.9108,
      "step": 970
    },
    {
      "epoch": 2.45,
      "learning_rate": 5.1928569594555524e-05,
      "loss": 3.9431,
      "step": 980
    },
    {
      "epoch": 2.48,
      "learning_rate": 5.114158233605334e-05,
      "loss": 3.9032,
      "step": 990
    },
    {
      "epoch": 2.5,
      "learning_rate": 5.035431199558007e-05,
      "loss": 3.8656,
      "step": 1000
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.9566953795182116e-05,
      "loss": 3.9753,
      "step": 1010
    },
    {
      "epoch": 2.55,
      "learning_rate": 4.877970297869273e-05,
      "loss": 3.8629,
      "step": 1020
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.799275476331691e-05,
      "loss": 3.923,
      "step": 1030
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.728492138076299e-05,
      "loss": 3.9497,
      "step": 1040
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.649908562316254e-05,
      "loss": 3.901,
      "step": 1050
    },
    {
      "epoch": 2.65,
      "learning_rate": 4.571411799895126e-05,
      "loss": 3.8918,
      "step": 1060
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.493021315916328e-05,
      "loss": 4.006,
      "step": 1070
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.41475654912906e-05,
      "loss": 3.8415,
      "step": 1080
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.336636907108009e-05,
      "loss": 3.8327,
      "step": 1090
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 3.9218,
      "step": 1100
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.180910442924312e-05,
      "loss": 3.9475,
      "step": 1110
    },
    {
      "epoch": 2.8,
      "learning_rate": 4.10334223677125e-05,
      "loss": 3.8446,
      "step": 1120
    },
    {
      "epoch": 2.83,
      "learning_rate": 4.025996377827835e-05,
      "loss": 3.9219,
      "step": 1130
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.9488920458041166e-05,
      "loss": 3.9198,
      "step": 1140
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.887395330218429e-05,
      "loss": 3.9445,
      "step": 1150
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.8184230724140076e-05,
      "loss": 3.8824,
      "step": 1160
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.742066181277458e-05,
      "loss": 3.9045,
      "step": 1170
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.666021224176369e-05,
      "loss": 3.9055,
      "step": 1180
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.5903070582318356e-05,
      "loss": 3.8711,
      "step": 1190
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.5149424585375625e-05,
      "loss": 3.8903,
      "step": 1200
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.439946113504152e-05,
      "loss": 3.9343,
      "step": 1210
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.365336620224874e-05,
      "loss": 3.8994,
      "step": 1220
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.2911324798640766e-05,
      "loss": 3.8374,
      "step": 1230
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.2173520930693986e-05,
      "loss": 3.9143,
      "step": 1240
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.144013755408895e-05,
      "loss": 3.9455,
      "step": 1250
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.0784022365509254e-05,
      "loss": 3.8526,
      "step": 1260
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.005953800309752e-05,
      "loss": 3.945,
      "step": 1270
    },
    {
      "epoch": 3.2,
      "learning_rate": 2.933999834340948e-05,
      "loss": 3.9014,
      "step": 1280
    },
    {
      "epoch": 3.23,
      "learning_rate": 2.8625581813089197e-05,
      "loss": 3.9427,
      "step": 1290
    },
    {
      "epoch": 3.25,
      "learning_rate": 2.79164655683813e-05,
      "loss": 3.986,
      "step": 1300
    },
    {
      "epoch": 3.28,
      "learning_rate": 2.7282938053546726e-05,
      "loss": 3.9099,
      "step": 1310
    },
    {
      "epoch": 3.3,
      "learning_rate": 2.6584375674542088e-05,
      "loss": 3.9174,
      "step": 1320
    },
    {
      "epoch": 3.33,
      "learning_rate": 2.5891619745829183e-05,
      "loss": 3.8473,
      "step": 1330
    },
    {
      "epoch": 3.35,
      "learning_rate": 2.5204842052401163e-05,
      "loss": 3.9291,
      "step": 1340
    },
    {
      "epoch": 3.38,
      "learning_rate": 2.4524212896808263e-05,
      "loss": 3.9068,
      "step": 1350
    },
    {
      "epoch": 3.4,
      "learning_rate": 2.3849901056927383e-05,
      "loss": 3.9449,
      "step": 1360
    },
    {
      "epoch": 3.43,
      "learning_rate": 2.3182073744109563e-05,
      "loss": 3.9191,
      "step": 1370
    },
    {
      "epoch": 3.45,
      "learning_rate": 2.2520896561716088e-05,
      "loss": 3.8854,
      "step": 1380
    },
    {
      "epoch": 3.48,
      "learning_rate": 2.186653346405333e-05,
      "loss": 3.8854,
      "step": 1390
    },
    {
      "epoch": 3.5,
      "learning_rate": 2.1219146715716332e-05,
      "loss": 3.8778,
      "step": 1400
    },
    {
      "epoch": 3.53,
      "learning_rate": 2.0578896851351602e-05,
      "loss": 3.9063,
      "step": 1410
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 3.9231,
      "step": 1420
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.9320441024971115e-05,
      "loss": 3.9061,
      "step": 1430
    },
    {
      "epoch": 3.6,
      "learning_rate": 1.8702547126434817e-05,
      "loss": 3.8533,
      "step": 1440
    },
    {
      "epoch": 3.63,
      "learning_rate": 1.8092414161446226e-05,
      "loss": 3.8664,
      "step": 1450
    },
    {
      "epoch": 3.65,
      "learning_rate": 1.7490193426707235e-05,
      "loss": 3.8676,
      "step": 1460
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.6896034256897624e-05,
      "loss": 3.8925,
      "step": 1470
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.636830545687481e-05,
      "loss": 3.8659,
      "step": 1480
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.5789867488415632e-05,
      "loss": 3.8847,
      "step": 1490
    },
    {
      "epoch": 3.75,
      "learning_rate": 1.5219912720367474e-05,
      "loss": 3.8723,
      "step": 1500
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.465858248631099e-05,
      "loss": 3.8678,
      "step": 1510
    },
    {
      "epoch": 3.8,
      "learning_rate": 1.410601598117246e-05,
      "loss": 3.8755,
      "step": 1520
    },
    {
      "epoch": 3.83,
      "learning_rate": 1.3562350226707105e-05,
      "loss": 3.8532,
      "step": 1530
    },
    {
      "epoch": 3.85,
      "learning_rate": 1.3080772659931728e-05,
      "loss": 3.8913,
      "step": 1540
    },
    {
      "epoch": 3.88,
      "learning_rate": 1.255438789482935e-05,
      "loss": 3.9418,
      "step": 1550
    },
    {
      "epoch": 3.9,
      "learning_rate": 1.2037288642780576e-05,
      "loss": 3.9547,
      "step": 1560
    },
    {
      "epoch": 3.93,
      "learning_rate": 1.1529603130607836e-05,
      "loss": 3.8475,
      "step": 1570
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.1031457250777206e-05,
      "loss": 3.8655,
      "step": 1580
    },
    {
      "epoch": 3.98,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 3.8665,
      "step": 1590
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.00642760995031e-05,
      "loss": 3.9558,
      "step": 1600
    },
    {
      "epoch": 4.03,
      "learning_rate": 9.595480663188527e-06,
      "loss": 3.8678,
      "step": 1610
    },
    {
      "epoch": 4.05,
      "learning_rate": 9.1367044700011e-06,
      "loss": 3.8966,
      "step": 1620
    },
    {
      "epoch": 4.08,
      "learning_rate": 8.688061284200266e-06,
      "loss": 3.9133,
      "step": 1630
    },
    {
      "epoch": 4.1,
      "learning_rate": 8.24966235732988e-06,
      "loss": 3.8803,
      "step": 1640
    },
    {
      "epoch": 4.13,
      "learning_rate": 7.821616400630865e-06,
      "loss": 3.8887,
      "step": 1650
    },
    {
      "epoch": 4.15,
      "learning_rate": 7.404029558083653e-06,
      "loss": 3.8584,
      "step": 1660
    },
    {
      "epoch": 4.18,
      "learning_rate": 6.997005380087302e-06,
      "loss": 3.8848,
      "step": 1670
    },
    {
      "epoch": 4.2,
      "learning_rate": 6.600644797781847e-06,
      "loss": 3.9117,
      "step": 1680
    },
    {
      "epoch": 4.23,
      "learning_rate": 6.215046098019966e-06,
      "loss": 3.9279,
      "step": 1690
    },
    {
      "epoch": 4.25,
      "learning_rate": 5.84030489899452e-06,
      "loss": 3.8673,
      "step": 1700
    },
    {
      "epoch": 4.28,
      "learning_rate": 5.476514126527771e-06,
      "loss": 3.9193,
      "step": 1710
    },
    {
      "epoch": 4.3,
      "learning_rate": 5.123763991028291e-06,
      "loss": 3.9194,
      "step": 1720
    },
    {
      "epoch": 4.33,
      "learning_rate": 4.782141965121128e-06,
      "loss": 3.9381,
      "step": 1730
    },
    {
      "epoch": 4.35,
      "learning_rate": 4.4517327619569785e-06,
      "loss": 3.8703,
      "step": 1740
    },
    {
      "epoch": 4.38,
      "learning_rate": 4.164019217687215e-06,
      "loss": 3.8278,
      "step": 1750
    },
    {
      "epoch": 4.4,
      "learning_rate": 3.855137787788848e-06,
      "loss": 3.8555,
      "step": 1760
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.5576990529444466e-06,
      "loss": 3.927,
      "step": 1770
    },
    {
      "epoch": 4.45,
      "learning_rate": 3.2998487148374725e-06,
      "loss": 3.887,
      "step": 1780
    },
    {
      "epoch": 4.48,
      "learning_rate": 3.024351940850789e-06,
      "loss": 3.9282,
      "step": 1790
    },
    {
      "epoch": 4.5,
      "learning_rate": 2.786362602027587e-06,
      "loss": 3.8723,
      "step": 1800
    },
    {
      "epoch": 4.53,
      "learning_rate": 2.533054395822704e-06,
      "loss": 3.8396,
      "step": 1810
    },
    {
      "epoch": 4.55,
      "learning_rate": 2.2915167260904092e-06,
      "loss": 3.8836,
      "step": 1820
    },
    {
      "epoch": 4.58,
      "learning_rate": 2.0618094877305038e-06,
      "loss": 3.9258,
      "step": 1830
    },
    {
      "epoch": 4.6,
      "learning_rate": 1.8652351313642568e-06,
      "loss": 3.9323,
      "step": 1840
    },
    {
      "epoch": 4.63,
      "learning_rate": 1.6581602048657385e-06,
      "loss": 3.913,
      "step": 1850
    },
    {
      "epoch": 4.65,
      "learning_rate": 1.463072765264001e-06,
      "loss": 3.9473,
      "step": 1860
    },
    {
      "epoch": 4.68,
      "learning_rate": 1.2800211890407087e-06,
      "loss": 3.8331,
      "step": 1870
    },
    {
      "epoch": 4.7,
      "learning_rate": 1.1256030118930727e-06,
      "loss": 3.8342,
      "step": 1880
    },
    {
      "epoch": 4.73,
      "learning_rate": 9.655421549647603e-07,
      "loss": 3.8911,
      "step": 1890
    },
    {
      "epoch": 4.75,
      "learning_rate": 8.176405357361195e-07,
      "loss": 3.8928,
      "step": 1900
    },
    {
      "epoch": 4.78,
      "learning_rate": 6.819348298638839e-07,
      "loss": 3.9422,
      "step": 1910
    },
    {
      "epoch": 4.8,
      "learning_rate": 5.584586887435739e-07,
      "loss": 3.875,
      "step": 1920
    },
    {
      "epoch": 4.83,
      "learning_rate": 4.472427311648375e-07,
      "loss": 3.8262,
      "step": 1930
    },
    {
      "epoch": 4.85,
      "learning_rate": 3.4831453571879667e-07,
      "loss": 3.8851,
      "step": 1940
    },
    {
      "epoch": 4.88,
      "learning_rate": 2.6169863395932303e-07,
      "loss": 3.9445,
      "step": 1950
    },
    {
      "epoch": 4.9,
      "learning_rate": 1.8741650431982616e-07,
      "loss": 3.9217,
      "step": 1960
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.2548656678721406e-07,
      "loss": 3.9199,
      "step": 1970
    },
    {
      "epoch": 4.95,
      "learning_rate": 7.59241783341913e-08,
      "loss": 3.9151,
      "step": 1980
    },
    {
      "epoch": 4.98,
      "learning_rate": 3.8741629111171964e-08,
      "loss": 3.8435,
      "step": 1990
    },
    {
      "epoch": 4.99,
      "step": 1995,
      "total_flos": 3.421391470957953e+18,
      "train_loss": 3.942027114447496,
      "train_runtime": 4658.7228,
      "train_samples_per_second": 54.898,
      "train_steps_per_second": 0.428
    }
  ],
  "max_steps": 1995,
  "num_train_epochs": 5,
  "total_flos": 3.421391470957953e+18,
  "trial_name": null,
  "trial_params": null
}