{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9943714821763603,
  "global_step": 1197,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 9.998278026262586e-05,
      "loss": 1.2245,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 9.995024035785423e-05,
      "loss": 1.1725,
      "step": 20
    },
    {
      "epoch": 0.08,
      "learning_rate": 9.989240907154061e-05,
      "loss": 1.1171,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.982376339572303e-05,
      "loss": 1.1048,
      "step": 40
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.971079882013168e-05,
      "loss": 1.0833,
      "step": 50
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.955275726883517e-05,
      "loss": 1.0715,
      "step": 60
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.93605842988853e-05,
      "loss": 1.0753,
      "step": 70
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.913441227700498e-05,
      "loss": 1.0741,
      "step": 80
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.890191654194604e-05,
      "loss": 1.054,
      "step": 90
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.870221658430234e-05,
      "loss": 1.0936,
      "step": 100
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.84213160452037e-05,
      "loss": 1.0471,
      "step": 110
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.807752681793723e-05,
      "loss": 1.0532,
      "step": 120
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.77397957149734e-05,
      "loss": 1.0514,
      "step": 130
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.73754293589898e-05,
      "loss": 1.0548,
      "step": 140
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.693958673061059e-05,
      "loss": 1.0478,
      "step": 150
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.64714126079933e-05,
      "loss": 1.0508,
      "step": 160
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.59712294645554e-05,
      "loss": 1.0555,
      "step": 170
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.543938182119164e-05,
      "loss": 1.0319,
      "step": 180
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.48762360089717e-05,
      "loss": 1.0342,
      "step": 190
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.42821799168152e-05,
      "loss": 1.0137,
      "step": 200
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.378495390198004e-05,
      "loss": 1.07,
      "step": 210
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.313630456629796e-05,
      "loss": 1.0473,
      "step": 220
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.245794339717895e-05,
      "loss": 1.0701,
      "step": 230
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.175033764267012e-05,
      "loss": 1.0561,
      "step": 240
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.101397469418175e-05,
      "loss": 1.0478,
      "step": 250
    },
    {
      "epoch": 0.65,
      "learning_rate": 9.024936175077722e-05,
      "loss": 1.0688,
      "step": 260
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.945702546981969e-05,
      "loss": 1.0811,
      "step": 270
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.863751160421594e-05,
      "loss": 1.0946,
      "step": 280
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.779138462650758e-05,
      "loss": 1.0887,
      "step": 290
    },
    {
      "epoch": 0.75,
      "learning_rate": 8.691922734006828e-05,
      "loss": 1.0664,
      "step": 300
    },
    {
      "epoch": 0.78,
      "learning_rate": 8.602164047767487e-05,
      "loss": 1.0711,
      "step": 310
    },
    {
      "epoch": 0.8,
      "learning_rate": 8.509924228772902e-05,
      "loss": 1.048,
      "step": 320
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.415266810841411e-05,
      "loss": 1.0452,
      "step": 330
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.318256993008107e-05,
      "loss": 1.0575,
      "step": 340
    },
    {
      "epoch": 0.88,
      "learning_rate": 8.218961594616424e-05,
      "loss": 1.051,
      "step": 350
    },
    {
      "epoch": 0.9,
      "learning_rate": 8.117449009293668e-05,
      "loss": 1.0317,
      "step": 360
    },
    {
      "epoch": 0.93,
      "learning_rate": 8.01378915784222e-05,
      "loss": 1.0507,
      "step": 370
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.908053440078809e-05,
      "loss": 1.0442,
      "step": 380
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.800314685655073e-05,
      "loss": 1.0515,
      "step": 390
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.690647103893258e-05,
      "loss": 1.0387,
      "step": 400
    },
    {
      "epoch": 1.03,
      "learning_rate": 7.579126232671621e-05,
      "loss": 1.035,
      "step": 410
    },
    {
      "epoch": 1.05,
      "learning_rate": 7.465828886394729e-05,
      "loss": 1.0261,
      "step": 420
    },
    {
      "epoch": 1.08,
      "learning_rate": 7.350833103084504e-05,
      "loss": 1.0129,
      "step": 430
    },
    {
      "epoch": 1.1,
      "learning_rate": 7.23421809062846e-05,
      "loss": 1.0281,
      "step": 440
    },
    {
      "epoch": 1.13,
      "learning_rate": 7.116064172222125e-05,
      "loss": 1.0178,
      "step": 450
    },
    {
      "epoch": 1.15,
      "learning_rate": 6.996452731043293e-05,
      "loss": 1.0262,
      "step": 460
    },
    {
      "epoch": 1.18,
      "learning_rate": 6.87546615419612e-05,
      "loss": 1.0123,
      "step": 470
    },
    {
      "epoch": 1.2,
      "learning_rate": 6.753187775963773e-05,
      "loss": 1.0098,
      "step": 480
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.62970182040864e-05,
      "loss": 1.0186,
      "step": 490
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.505093343359682e-05,
      "loss": 1.0124,
      "step": 500
    },
    {
      "epoch": 1.28,
      "learning_rate": 6.379448173826876e-05,
      "loss": 1.0153,
      "step": 510
    },
    {
      "epoch": 1.3,
      "learning_rate": 6.252852854883089e-05,
      "loss": 1.0101,
      "step": 520
    },
    {
      "epoch": 1.33,
      "learning_rate": 6.125394584054119e-05,
      "loss": 1.0216,
      "step": 530
    },
    {
      "epoch": 1.35,
      "learning_rate": 5.997161153257963e-05,
      "loss": 1.0014,
      "step": 540
    },
    {
      "epoch": 1.38,
      "learning_rate": 5.868240888334653e-05,
      "loss": 1.0179,
      "step": 550
    },
    {
      "epoch": 1.4,
      "learning_rate": 5.7387225882083584e-05,
      "loss": 0.9943,
      "step": 560
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.6086954637236135e-05,
      "loss": 1.0042,
      "step": 570
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.47824907619782e-05,
      "loss": 1.0006,
      "step": 580
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.3474732757323666e-05,
      "loss": 1.0209,
      "step": 590
    },
    {
      "epoch": 1.5,
      "learning_rate": 5.2164581393248056e-05,
      "loss": 1.0143,
      "step": 600
    },
    {
      "epoch": 1.53,
      "learning_rate": 5.08529390882478e-05,
      "loss": 1.0025,
      "step": 610
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.9540709287763685e-05,
      "loss": 1.0208,
      "step": 620
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.822879584189731e-05,
      "loss": 1.0163,
      "step": 630
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.691810238284848e-05,
      "loss": 1.0226,
      "step": 640
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.5609531702502905e-05,
      "loss": 0.999,
      "step": 650
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.430398513059861e-05,
      "loss": 1.0099,
      "step": 660
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.3002361913899544e-05,
      "loss": 1.0034,
      "step": 670
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.170555859680375e-05,
      "loss": 0.9897,
      "step": 680
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.0414468403813095e-05,
      "loss": 1.0012,
      "step": 690
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.912998062428954e-05,
      "loss": 1.0006,
      "step": 700
    },
    {
      "epoch": 1.78,
      "learning_rate": 3.785297999992206e-05,
      "loss": 1.0068,
      "step": 710
    },
    {
      "epoch": 1.8,
      "learning_rate": 3.658434611532578e-05,
      "loss": 0.9991,
      "step": 720
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.532495279219336e-05,
      "loss": 1.0079,
      "step": 730
    },
    {
      "epoch": 1.85,
      "learning_rate": 3.4075667487415785e-05,
      "loss": 1.0042,
      "step": 740
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.283735069558724e-05,
      "loss": 0.9921,
      "step": 750
    },
    {
      "epoch": 1.9,
      "learning_rate": 3.1610855356305354e-05,
      "loss": 0.9921,
      "step": 760
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.0397026266675443e-05,
      "loss": 0.9906,
      "step": 770
    },
    {
      "epoch": 1.95,
      "learning_rate": 2.919669949942314e-05,
      "loss": 1.008,
      "step": 780
    },
    {
      "epoch": 1.98,
      "learning_rate": 2.801070182701625e-05,
      "loss": 1.0001,
      "step": 790
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.6839850152192593e-05,
      "loss": 1.0108,
      "step": 800
    },
    {
      "epoch": 2.03,
      "learning_rate": 2.5684950945285935e-05,
      "loss": 0.9937,
      "step": 810
    },
    {
      "epoch": 2.05,
      "learning_rate": 2.4546799688737747e-05,
      "loss": 0.9816,
      "step": 820
    },
    {
      "epoch": 2.08,
      "learning_rate": 2.3426180329177215e-05,
      "loss": 0.968,
      "step": 830
    },
    {
      "epoch": 2.1,
      "learning_rate": 2.2323864737447063e-05,
      "loss": 0.9874,
      "step": 840
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.124061217694701e-05,
      "loss": 0.983,
      "step": 850
    },
    {
      "epoch": 2.15,
      "learning_rate": 2.0177168780661126e-05,
      "loss": 1.0007,
      "step": 860
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.9134267037229243e-05,
      "loss": 0.9891,
      "step": 870
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.8112625286416553e-05,
      "loss": 0.995,
      "step": 880
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.7112947224328618e-05,
      "loss": 0.9948,
      "step": 890
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.9747,
      "step": 900
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.5182220834680777e-05,
      "loss": 0.9999,
      "step": 910
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.4252502371175712e-05,
      "loss": 0.9752,
      "step": 920
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.3347406408508695e-05,
      "loss": 0.9903,
      "step": 930
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.2467556367270755e-05,
      "loss": 0.9751,
      "step": 940
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.1613558278927416e-05,
      "loss": 0.9916,
      "step": 950
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0786000368390687e-05,
      "loss": 0.9892,
      "step": 960
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.985452648855803e-06,
      "loss": 0.9688,
      "step": 970
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.212466529182196e-06,
      "loss": 0.9854,
      "step": 980
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.46757443408886e-06,
      "loss": 0.9905,
      "step": 990
    },
    {
      "epoch": 2.5,
      "learning_rate": 7.751289437425774e-06,
      "loss": 0.9907,
      "step": 1000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.0641049087740385e-06,
      "loss": 0.9818,
      "step": 1010
    },
    {
      "epoch": 2.55,
      "learning_rate": 6.406494173618083e-06,
      "loss": 0.9869,
      "step": 1020
    },
    {
      "epoch": 2.58,
      "learning_rate": 5.778910187324088e-06,
      "loss": 0.975,
      "step": 1030
    },
    {
      "epoch": 2.6,
      "learning_rate": 5.181785223148999e-06,
      "loss": 0.987,
      "step": 1040
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.615530574495325e-06,
      "loss": 0.987,
      "step": 1050
    },
    {
      "epoch": 2.65,
      "learning_rate": 4.080536271616542e-06,
      "loss": 0.9848,
      "step": 1060
    },
    {
      "epoch": 2.68,
      "learning_rate": 3.577170812968328e-06,
      "loss": 1.0055,
      "step": 1070
    },
    {
      "epoch": 2.7,
      "learning_rate": 3.105780911390738e-06,
      "loss": 0.9703,
      "step": 1080
    },
    {
      "epoch": 2.73,
      "learning_rate": 2.666691255296011e-06,
      "loss": 0.9766,
      "step": 1090
    },
    {
      "epoch": 2.75,
      "learning_rate": 2.2602042850265957e-06,
      "loss": 0.9941,
      "step": 1100
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.8865999845374793e-06,
      "loss": 0.9905,
      "step": 1110
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.5461356885461075e-06,
      "loss": 1.0004,
      "step": 1120
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.2390459052829595e-06,
      "loss": 0.9939,
      "step": 1130
    },
    {
      "epoch": 2.85,
      "learning_rate": 9.655421549647603e-07,
      "loss": 0.995,
      "step": 1140
    },
    {
      "epoch": 2.88,
      "learning_rate": 7.258128241015738e-07,
      "loss": 0.9973,
      "step": 1150
    },
    {
      "epoch": 2.9,
      "learning_rate": 5.200230357381208e-07,
      "loss": 0.9845,
      "step": 1160
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.4831453571879667e-07,
      "loss": 0.9807,
      "step": 1170
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.1080559505462505e-07,
      "loss": 0.9849,
      "step": 1180
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.0759092845938901e-07,
      "loss": 0.9902,
      "step": 1190
    },
    {
      "epoch": 2.99,
      "step": 1197,
      "total_flos": 1.6682737252895293e+18,
      "train_loss": 1.0210458891732352,
      "train_runtime": 3873.2552,
      "train_samples_per_second": 39.619,
      "train_steps_per_second": 0.309
    }
  ],
  "max_steps": 1197,
  "num_train_epochs": 3,
  "total_flos": 1.6682737252895293e+18,
  "trial_name": null,
  "trial_params": null
}