{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.997275204359673,
  "global_step": 1650,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "learning_rate": 9e-06, "loss": 3.3303, "step": 10 },
    { "epoch": 0.04, "learning_rate": 1.9e-05, "loss": 3.1435, "step": 20 },
    { "epoch": 0.05, "learning_rate": 2.9e-05, "loss": 2.9781, "step": 30 },
    { "epoch": 0.07, "learning_rate": 3.9000000000000006e-05, "loss": 2.9275, "step": 40 },
    { "epoch": 0.09, "learning_rate": 4.9e-05, "loss": 2.828, "step": 50 },
    { "epoch": 0.11, "learning_rate": 4.9996096595934404e-05, "loss": 2.7839, "step": 60 },
    { "epoch": 0.13, "learning_rate": 4.9982604912220876e-05, "loss": 2.7818, "step": 70 },
    { "epoch": 0.15, "learning_rate": 4.9959481958748065e-05, "loss": 2.7402, "step": 80 },
    { "epoch": 0.16, "learning_rate": 4.99267366498547e-05, "loss": 2.7275, "step": 90 },
    { "epoch": 0.18, "learning_rate": 4.988438160948068e-05, "loss": 2.6234, "step": 100 },
    { "epoch": 0.2, "learning_rate": 4.9832433166300355e-05, "loss": 2.6431, "step": 110 },
    { "epoch": 0.22, "learning_rate": 4.977091134742743e-05, "loss": 2.6141, "step": 120 },
    { "epoch": 0.24, "learning_rate": 4.9699839870694225e-05, "loss": 2.7426, "step": 130 },
    { "epoch": 0.25, "learning_rate": 4.9619246135507924e-05, "loss": 2.6885, "step": 140 },
    { "epoch": 0.27, "learning_rate": 4.95291612122876e-05, "loss": 2.6116, "step": 150 },
    { "epoch": 0.29, "learning_rate": 4.9429619830486013e-05, "loss": 2.6286, "step": 160 },
    { "epoch": 0.31, "learning_rate": 4.93206603652007e-05, "loss": 2.612, "step": 170 },
    { "epoch": 0.33, "learning_rate": 4.920232482237966e-05, "loss": 2.5685, "step": 180 },
    { "epoch": 0.35, "learning_rate": 4.907465882262728e-05, "loss": 2.5937, "step": 190 },
    { "epoch": 0.36, "learning_rate": 4.89377115836167e-05, "loss": 2.579, "step": 200 },
    { "epoch": 0.38, "learning_rate": 4.8791535901115457e-05, "loss": 2.6084, "step": 210 },
    { "epoch": 0.4, "learning_rate": 4.8636188128631746e-05, "loss": 2.6924, "step": 220 },
    { "epoch": 0.42, "learning_rate": 4.8471728155689035e-05, "loss": 2.7031, "step": 230 },
    { "epoch": 0.44, "learning_rate": 4.829821938473753e-05, "loss": 2.5916, "step": 240 },
    { "epoch": 0.45, "learning_rate": 4.8115728706711335e-05, "loss": 2.5251, "step": 250 },
    { "epoch": 0.47, "learning_rate": 4.792432647524068e-05, "loss": 2.5536, "step": 260 },
    { "epoch": 0.49, "learning_rate": 4.772408647952932e-05, "loss": 2.5483, "step": 270 },
    { "epoch": 0.51, "learning_rate": 4.751508591590734e-05, "loss": 2.6099, "step": 280 },
    { "epoch": 0.53, "learning_rate": 4.729740535807052e-05, "loss": 2.5678, "step": 290 },
    { "epoch": 0.54, "learning_rate": 4.707112872601764e-05, "loss": 2.5789, "step": 300 },
    { "epoch": 0.56, "learning_rate": 4.686020214463798e-05, "loss": 2.5413, "step": 310 },
    { "epoch": 0.58, "learning_rate": 4.661783601300388e-05, "loss": 2.5805, "step": 320 },
    { "epoch": 0.6, "learning_rate": 4.639257812039141e-05, "loss": 2.6076, "step": 330 },
    { "epoch": 0.62, "learning_rate": 4.6134459767586845e-05, "loss": 2.5348, "step": 340 },
    { "epoch": 0.64, "learning_rate": 4.586819367822904e-05, "loss": 2.5935, "step": 350 },
    { "epoch": 0.65, "learning_rate": 4.559388250296406e-05, "loss": 2.6291, "step": 360 },
    { "epoch": 0.67, "learning_rate": 4.53116319939717e-05, "loss": 2.6044, "step": 370 },
    { "epoch": 0.69, "learning_rate": 4.502155096419608e-05, "loss": 2.7157, "step": 380 },
    { "epoch": 0.71, "learning_rate": 4.472375124539612e-05, "loss": 2.5929, "step": 390 },
    { "epoch": 0.73, "learning_rate": 4.441834764503228e-05, "loss": 2.6576, "step": 400 },
    { "epoch": 0.74, "learning_rate": 4.4137080337327205e-05, "loss": 2.5642, "step": 410 },
    { "epoch": 0.76, "learning_rate": 4.3817556118942425e-05, "loss": 2.5913, "step": 420 },
    { "epoch": 0.78, "learning_rate": 4.349077737446525e-05, "loss": 2.5249, "step": 430 },
    { "epoch": 0.8, "learning_rate": 4.315687008332217e-05, "loss": 2.6176, "step": 440 },
    { "epoch": 0.82, "learning_rate": 4.281596297313013e-05, "loss": 2.5421, "step": 450 },
    { "epoch": 0.84, "learning_rate": 4.2468187470069607e-05, "loss": 2.5828, "step": 460 },
    { "epoch": 0.85, "learning_rate": 4.211367764821722e-05, "loss": 2.5956, "step": 470 },
    { "epoch": 0.87, "learning_rate": 4.17525701778577e-05, "loss": 2.5767, "step": 480 },
    { "epoch": 0.89, "learning_rate": 4.138500427279485e-05, "loss": 2.6687, "step": 490 },
    { "epoch": 0.91, "learning_rate": 4.101112163668203e-05, "loss": 2.553, "step": 500 },
    { "epoch": 0.93, "learning_rate": 4.066934554048674e-05, "loss": 2.5704, "step": 510 },
    { "epoch": 0.94, "learning_rate": 4.028386018592041e-05, "loss": 2.5323, "step": 520 },
    { "epoch": 0.96, "learning_rate": 3.9892482612310836e-05, "loss": 2.5841, "step": 530 },
    { "epoch": 0.98, "learning_rate": 3.949536370316285e-05, "loss": 2.5728, "step": 540 },
    { "epoch": 1.0, "learning_rate": 3.909265655537542e-05, "loss": 2.6388, "step": 550 },
    { "epoch": 1.0, "eval_loss": 2.567617893218994, "eval_runtime": 85.0781, "eval_samples_per_second": 207.045, "eval_steps_per_second": 6.476, "step": 550 },
    { "epoch": 1.02, "learning_rate": 3.868451642021992e-05, "loss": 2.5005, "step": 560 },
    { "epoch": 1.04, "learning_rate": 3.827110064348773e-05, "loss": 2.5417, "step": 570 },
    { "epoch": 1.05, "learning_rate": 3.785256860483054e-05, "loss": 2.5753, "step": 580 },
    { "epoch": 1.07, "learning_rate": 3.742908165631636e-05, "loss": 2.619, "step": 590 },
    { "epoch": 1.09, "learning_rate": 3.7000803060225284e-05, "loss": 2.4701, "step": 600 },
    { "epoch": 1.11, "learning_rate": 3.6567897926108756e-05, "loss": 2.5073, "step": 610 },
    { "epoch": 1.13, "learning_rate": 3.6174465516778035e-05, "loss": 2.6094, "step": 620 },
    { "epoch": 1.14, "learning_rate": 3.5777543631116977e-05, "loss": 2.5532, "step": 630 },
    { "epoch": 1.16, "learning_rate": 3.533257750864685e-05, "loss": 2.5672, "step": 640 },
    { "epoch": 1.18, "learning_rate": 3.4883627980857525e-05, "loss": 2.5595, "step": 650 },
    { "epoch": 1.2, "learning_rate": 3.4430868126338124e-05, "loss": 2.5645, "step": 660 },
    { "epoch": 1.22, "learning_rate": 3.397447249263124e-05, "loss": 2.5174, "step": 670 },
    { "epoch": 1.24, "learning_rate": 3.351461702894155e-05, "loss": 2.5702, "step": 680 },
    { "epoch": 1.25, "learning_rate": 3.305147901830405e-05, "loss": 2.5364, "step": 690 },
    { "epoch": 1.27, "learning_rate": 3.2631995776401094e-05, "loss": 2.5036, "step": 700 },
    { "epoch": 1.29, "learning_rate": 3.2163113818194964e-05, "loss": 2.5738, "step": 710 },
    { "epoch": 1.31, "learning_rate": 3.169147034325582e-05, "loss": 2.5802, "step": 720 },
    { "epoch": 1.33, "learning_rate": 3.121724717912138e-05, "loss": 2.513, "step": 730 },
    { "epoch": 1.34, "learning_rate": 3.0740627147848675e-05, "loss": 2.5581, "step": 740 },
    { "epoch": 1.36, "learning_rate": 3.026179399553264e-05, "loss": 2.4153, "step": 750 },
    { "epoch": 1.38, "learning_rate": 2.9780932321468515e-05, "loss": 2.6043, "step": 760 },
    { "epoch": 1.4, "learning_rate": 2.929822750698524e-05, "loss": 2.5542, "step": 770 },
    { "epoch": 1.42, "learning_rate": 2.8813865643977526e-05, "loss": 2.5185, "step": 780 },
    { "epoch": 1.44, "learning_rate": 2.832803346316381e-05, "loss": 2.4305, "step": 790 },
    { "epoch": 1.45, "learning_rate": 2.784091826209803e-05, "loss": 2.536, "step": 800 },
    { "epoch": 1.47, "learning_rate": 2.7352707832962865e-05, "loss": 2.5403, "step": 810 },
    { "epoch": 1.49, "learning_rate": 2.6912537578399655e-05, "loss": 2.6087, "step": 820 },
    { "epoch": 1.51, "learning_rate": 2.6422765037147695e-05, "loss": 2.6133, "step": 830 },
    { "epoch": 1.53, "learning_rate": 2.5932443992890344e-05, "loss": 2.5111, "step": 840 },
    { "epoch": 1.54, "learning_rate": 2.5441763473723846e-05, "loss": 2.58, "step": 850 },
    { "epoch": 1.56, "learning_rate": 2.4950912646328875e-05, "loss": 2.5494, "step": 860 },
    { "epoch": 1.58, "learning_rate": 2.446008074304316e-05, "loss": 2.5761, "step": 870 },
    { "epoch": 1.6, "learning_rate": 2.396945698890884e-05, "loss": 2.5312, "step": 880 },
    { "epoch": 1.62, "learning_rate": 2.3479230528722583e-05, "loss": 2.6176, "step": 890 },
    { "epoch": 1.63, "learning_rate": 2.2989590354116666e-05, "loss": 2.524, "step": 900 },
    { "epoch": 1.65, "learning_rate": 2.250072523069914e-05, "loss": 2.547, "step": 910 },
    { "epoch": 1.67, "learning_rate": 2.201282362528105e-05, "loss": 2.6042, "step": 920 },
    { "epoch": 1.69, "learning_rate": 2.1526073633218907e-05, "loss": 2.5907, "step": 930 },
    { "epoch": 1.71, "learning_rate": 2.1040662905900378e-05, "loss": 2.5396, "step": 940 },
    { "epoch": 1.73, "learning_rate": 2.0556778578401055e-05, "loss": 2.5532, "step": 950 },
    { "epoch": 1.74, "learning_rate": 2.0122741949596797e-05, "loss": 2.5517, "step": 960 },
    { "epoch": 1.76, "learning_rate": 1.969022835350477e-05, "loss": 2.5435, "step": 970 },
    { "epoch": 1.78, "learning_rate": 1.921160821984587e-05, "loss": 2.5524, "step": 980 },
    { "epoch": 1.8, "learning_rate": 1.8735219621405293e-05, "loss": 2.5543, "step": 990 },
    { "epoch": 1.82, "learning_rate": 1.82612462150562e-05, "loss": 2.5605, "step": 1000 },
    { "epoch": 1.83, "learning_rate": 1.7789870726569255e-05, "loss": 2.5899, "step": 1010 },
    { "epoch": 1.85, "learning_rate": 1.7321274880168302e-05, "loss": 2.5092, "step": 1020 },
    { "epoch": 1.87, "learning_rate": 1.685563932847226e-05, "loss": 2.5462, "step": 1030 },
    { "epoch": 1.89, "learning_rate": 1.6393143582850085e-05, "loss": 2.6212, "step": 1040 },
    { "epoch": 1.91, "learning_rate": 1.5933965944215722e-05, "loss": 2.4538, "step": 1050 },
    { "epoch": 1.93, "learning_rate": 1.552368938690414e-05, "loss": 2.5211, "step": 1060 },
    { "epoch": 1.94, "learning_rate": 1.5071302734130489e-05, "loss": 2.5329, "step": 1070 },
    { "epoch": 1.96, "learning_rate": 1.4622743783154223e-05, "loss": 2.4453, "step": 1080 },
    { "epoch": 1.98, "learning_rate": 1.4178185461989662e-05, "loss": 2.5155, "step": 1090 },
    { "epoch": 2.0, "learning_rate": 1.3737799156332143e-05, "loss": 2.5173, "step": 1100 },
    { "epoch": 2.0, "eval_loss": 2.537750244140625, "eval_runtime": 85.1086, "eval_samples_per_second": 206.971, "eval_steps_per_second": 6.474, "step": 1101 },
    { "epoch": 2.02, "learning_rate": 1.330175464348567e-05, "loss": 2.5912, "step": 1110 },
    { "epoch": 2.03, "learning_rate": 1.2870220026910407e-05, "loss": 2.6022, "step": 1120 },
    { "epoch": 2.05, "learning_rate": 1.2443361671415687e-05, "loss": 2.5254, "step": 1130 },
    { "epoch": 2.07, "learning_rate": 1.2021344139023186e-05, "loss": 2.5473, "step": 1140 },
    { "epoch": 2.09, "learning_rate": 1.1604330125525079e-05, "loss": 2.5018, "step": 1150 },
    { "epoch": 2.11, "learning_rate": 1.1192480397761837e-05, "loss": 2.5195, "step": 1160 },
    { "epoch": 2.13, "learning_rate": 1.07859537316434e-05, "loss": 2.5813, "step": 1170 },
    { "epoch": 2.14, "learning_rate": 1.0384906850938166e-05, "loss": 2.5647, "step": 1180 },
    { "epoch": 2.16, "learning_rate": 9.989494366852904e-06, "loss": 2.5216, "step": 1190 },
    { "epoch": 2.18, "learning_rate": 9.599868718427257e-06, "loss": 2.5167, "step": 1200 },
    { "epoch": 2.2, "learning_rate": 9.216180113765558e-06, "loss": 2.6351, "step": 1210 },
    { "epoch": 2.22, "learning_rate": 8.838576472128756e-06, "loss": 2.4702, "step": 1220 },
    { "epoch": 2.23, "learning_rate": 8.467203366908707e-06, "loss": 2.5762, "step": 1230 },
    { "epoch": 2.25, "learning_rate": 8.102203969506886e-06, "loss": 2.5586, "step": 1240 },
    { "epoch": 2.27, "learning_rate": 7.743718994139071e-06, "loss": 2.5613, "step": 1250 },
    { "epoch": 2.29, "learning_rate": 7.391886643587342e-06, "loss": 2.515, "step": 1260 },
    { "epoch": 2.31, "learning_rate": 7.081037681011338e-06, "loss": 2.4922, "step": 1270 },
    { "epoch": 2.33, "learning_rate": 6.742216838599091e-06, "loss": 2.4874, "step": 1280 },
    { "epoch": 2.34, "learning_rate": 6.443292521679578e-06, "loss": 2.4962, "step": 1290 },
    { "epoch": 2.36, "learning_rate": 6.117954692870412e-06, "loss": 2.539, "step": 1300 },
    { "epoch": 2.38, "learning_rate": 5.7998962519633045e-06, "loss": 2.6518, "step": 1310 },
    { "epoch": 2.4, "learning_rate": 5.489239816541755e-06, "loss": 2.5034, "step": 1320 },
    { "epoch": 2.42, "learning_rate": 5.186105150575232e-06, "loss": 2.5815, "step": 1330 },
    { "epoch": 2.43, "learning_rate": 4.890609118247888e-06, "loss": 2.5083, "step": 1340 },
    { "epoch": 2.45, "learning_rate": 4.602865638905224e-06, "loss": 2.504, "step": 1350 },
    { "epoch": 2.47, "learning_rate": 4.322985643135952e-06, "loss": 2.5134, "step": 1360 },
    { "epoch": 2.49, "learning_rate": 4.051077030006228e-06, "loss": 2.5468, "step": 1370 },
    { "epoch": 2.51, "learning_rate": 3.7872446254624104e-06, "loss": 2.4814, "step": 1380 },
    { "epoch": 2.52, "learning_rate": 3.5820620283737616e-06, "loss": 2.5374, "step": 1390 },
    { "epoch": 2.54, "learning_rate": 3.3330209972108976e-06, "loss": 2.4908, "step": 1400 },
    { "epoch": 2.56, "learning_rate": 3.092332998903416e-06, "loss": 2.4743, "step": 1410 },
    { "epoch": 2.58, "learning_rate": 2.860090823256359e-06, "loss": 2.4944, "step": 1420 },
    { "epoch": 2.6, "learning_rate": 2.6363840040493747e-06, "loss": 2.5556, "step": 1430 },
    { "epoch": 2.62, "learning_rate": 2.44241694508541e-06, "loss": 2.4658, "step": 1440 },
    { "epoch": 2.63, "learning_rate": 2.235162156786502e-06, "loss": 2.509, "step": 1450 },
    { "epoch": 2.65, "learning_rate": 2.0366836468246582e-06, "loss": 2.5356, "step": 1460 },
    { "epoch": 2.67, "learning_rate": 1.8470579324436716e-06, "loss": 2.5226, "step": 1470 },
    { "epoch": 2.69, "learning_rate": 1.6663581179661269e-06, "loss": 2.4822, "step": 1480 },
    { "epoch": 2.71, "learning_rate": 1.4946538666102877e-06, "loss": 2.4907, "step": 1490 },
    { "epoch": 2.72, "learning_rate": 1.3320113736335931e-06, "loss": 2.6054, "step": 1500 },
    { "epoch": 2.74, "learning_rate": 1.178493340813125e-06, "loss": 2.4541, "step": 1510 },
    { "epoch": 2.76, "learning_rate": 1.034158952272879e-06, "loss": 2.5706, "step": 1520 },
    { "epoch": 2.78, "learning_rate": 8.990638516671756e-07, "loss": 2.5482, "step": 1530 },
    { "epoch": 2.8, "learning_rate": 7.732601207289497e-07, "loss": 2.5515, "step": 1540 },
    { "epoch": 2.82, "learning_rate": 6.567962591912613e-07, "loss": 2.503, "step": 1550 },
    { "epoch": 2.83, "learning_rate": 5.497171660897221e-07, "loss": 2.5679, "step": 1560 },
    { "epoch": 2.85, "learning_rate": 4.614041530991903e-07, "loss": 2.49, "step": 1570 },
    { "epoch": 2.87, "learning_rate": 3.7226684613065333e-07, "loss": 2.4828, "step": 1580 },
    { "epoch": 2.89, "learning_rate": 2.92623999103267e-07, "loss": 2.5376, "step": 1590 },
    { "epoch": 2.91, "learning_rate": 2.2250631585063186e-07, "loss": 2.5643, "step": 1600 },
    { "epoch": 2.92, "learning_rate": 1.619408280743917e-07, "loss": 2.4503, "step": 1610 },
    { "epoch": 2.94, "learning_rate": 1.109508849230001e-07, "loss": 2.5301, "step": 1620 },
    { "epoch": 2.96, "learning_rate": 6.955614399018206e-08, "loss": 2.5575, "step": 1630 },
    { "epoch": 2.98, "learning_rate": 3.7772563736551694e-08, "loss": 2.4909, "step": 1640 },
    { "epoch": 3.0, "learning_rate": 1.5612397337325113e-08, "loss": 2.5105, "step": 1650 },
    { "epoch": 3.0, "eval_loss": 2.531651735305786, "eval_runtime": 85.0956, "eval_samples_per_second": 207.002, "eval_steps_per_second": 6.475, "step": 1650 },
    { "epoch": 3.0, "step": 1650, "total_flos": 2.463687632555606e+18, "train_loss": 2.5754187508785362, "train_runtime": 3709.1083, "train_samples_per_second": 56.991, "train_steps_per_second": 0.445 }
  ],
  "max_steps": 1650,
  "num_train_epochs": 3,
  "total_flos": 2.463687632555606e+18,
  "trial_name": null,
  "trial_params": null
}