{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.984,
  "global_step": 1560,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.06, "learning_rate": 9.998986144924251e-05, "loss": 0.6631, "step": 10 },
    { "epoch": 0.13, "learning_rate": 9.995944990857849e-05, "loss": 0.4459, "step": 20 },
    { "epoch": 0.19, "learning_rate": 9.990877771116589e-05, "loss": 0.3515, "step": 30 },
    { "epoch": 0.26, "learning_rate": 9.983786540671051e-05, "loss": 0.3238, "step": 40 },
    { "epoch": 0.32, "learning_rate": 9.974674175313228e-05, "loss": 0.3121, "step": 50 },
    { "epoch": 0.38, "learning_rate": 9.96354437049027e-05, "loss": 0.3103, "step": 60 },
    { "epoch": 0.45, "learning_rate": 9.95040163980582e-05, "loss": 0.3007, "step": 70 },
    { "epoch": 0.51, "learning_rate": 9.935251313189564e-05, "loss": 0.2868, "step": 80 },
    { "epoch": 0.58, "learning_rate": 9.918099534735718e-05, "loss": 0.2881, "step": 90 },
    { "epoch": 0.64, "learning_rate": 9.898953260211338e-05, "loss": 0.286, "step": 100 },
    { "epoch": 0.7, "learning_rate": 9.877820254235471e-05, "loss": 0.2911, "step": 110 },
    { "epoch": 0.77, "learning_rate": 9.85470908713026e-05, "loss": 0.2855, "step": 120 },
    { "epoch": 0.83, "learning_rate": 9.829629131445342e-05, "loss": 0.2845, "step": 130 },
    { "epoch": 0.9, "learning_rate": 9.802590558156862e-05, "loss": 0.2746, "step": 140 },
    { "epoch": 0.96, "learning_rate": 9.773604332542729e-05, "loss": 0.2773, "step": 150 },
    { "epoch": 1.02, "learning_rate": 9.742682209735727e-05, "loss": 0.2711, "step": 160 },
    { "epoch": 1.09, "learning_rate": 9.709836729956325e-05, "loss": 0.2702, "step": 170 },
    { "epoch": 1.15, "learning_rate": 9.675081213427076e-05, "loss": 0.2653, "step": 180 },
    { "epoch": 1.22, "learning_rate": 9.638429754970715e-05, "loss": 0.2733, "step": 190 },
    { "epoch": 1.28, "learning_rate": 9.599897218294122e-05, "loss": 0.2573, "step": 200 },
    { "epoch": 1.34, "learning_rate": 9.559499229960451e-05, "loss": 0.2712, "step": 210 },
    { "epoch": 1.41, "learning_rate": 9.517252173051911e-05, "loss": 0.2658, "step": 220 },
    { "epoch": 1.47, "learning_rate": 9.473173180525737e-05, "loss": 0.2602, "step": 230 },
    { "epoch": 1.54, "learning_rate": 9.42728012826605e-05, "loss": 0.2597, "step": 240 },
    { "epoch": 1.6, "learning_rate": 9.37959162783444e-05, "loss": 0.2603, "step": 250 },
    { "epoch": 1.66, "learning_rate": 9.330127018922194e-05, "loss": 0.2648, "step": 260 },
    { "epoch": 1.73, "learning_rate": 9.278906361507238e-05, "loss": 0.2703, "step": 270 },
    { "epoch": 1.79, "learning_rate": 9.225950427718975e-05, "loss": 0.2554, "step": 280 },
    { "epoch": 1.86, "learning_rate": 9.171280693414307e-05, "loss": 0.2578, "step": 290 },
    { "epoch": 1.92, "learning_rate": 9.114919329468282e-05, "loss": 0.2605, "step": 300 },
    { "epoch": 1.98, "learning_rate": 9.056889192782866e-05, "loss": 0.2635, "step": 310 },
    { "epoch": 2.05, "learning_rate": 8.997213817017507e-05, "loss": 0.2604, "step": 320 },
    { "epoch": 2.11, "learning_rate": 8.935917403045251e-05, "loss": 0.2505, "step": 330 },
    { "epoch": 2.18, "learning_rate": 8.873024809138272e-05, "loss": 0.2478, "step": 340 },
    { "epoch": 2.24, "learning_rate": 8.808561540886796e-05, "loss": 0.2514, "step": 350 },
    { "epoch": 2.3, "learning_rate": 8.742553740855506e-05, "loss": 0.2517, "step": 360 },
    { "epoch": 2.37, "learning_rate": 8.675028177981643e-05, "loss": 0.2472, "step": 370 },
    { "epoch": 2.43, "learning_rate": 8.606012236719073e-05, "loss": 0.2496, "step": 380 },
    { "epoch": 2.5, "learning_rate": 8.535533905932738e-05, "loss": 0.254, "step": 390 },
    { "epoch": 2.56, "learning_rate": 8.463621767547998e-05, "loss": 0.2545, "step": 400 },
    { "epoch": 2.62, "learning_rate": 8.390304984959454e-05, "loss": 0.2521, "step": 410 },
    { "epoch": 2.69, "learning_rate": 8.315613291203976e-05, "loss": 0.2482, "step": 420 },
    { "epoch": 2.75, "learning_rate": 8.239576976902695e-05, "loss": 0.2541, "step": 430 },
    { "epoch": 2.82, "learning_rate": 8.162226877976887e-05, "loss": 0.2548, "step": 440 },
    { "epoch": 2.88, "learning_rate": 8.083594363142717e-05, "loss": 0.248, "step": 450 },
    { "epoch": 2.94, "learning_rate": 8.003711321189895e-05, "loss": 0.2413, "step": 460 },
    { "epoch": 3.01, "learning_rate": 7.922610148049445e-05, "loss": 0.2477, "step": 470 },
    { "epoch": 3.07, "learning_rate": 7.840323733655778e-05, "loss": 0.2421, "step": 480 },
    { "epoch": 3.14, "learning_rate": 7.756885448608459e-05, "loss": 0.2408, "step": 490 },
    { "epoch": 3.2, "learning_rate": 7.672329130639005e-05, "loss": 0.2365, "step": 500 },
    { "epoch": 3.26, "learning_rate": 7.586689070888284e-05, "loss": 0.2405, "step": 510 },
    { "epoch": 3.33, "learning_rate": 7.500000000000001e-05, "loss": 0.2412, "step": 520 },
    { "epoch": 3.39, "learning_rate": 7.412297074035967e-05, "loss": 0.2331, "step": 530 },
    { "epoch": 3.46, "learning_rate": 7.323615860218843e-05, "loss": 0.2352, "step": 540 },
    { "epoch": 3.52, "learning_rate": 7.233992322508129e-05, "loss": 0.2455, "step": 550 },
    { "epoch": 3.58, "learning_rate": 7.143462807015271e-05, "loss": 0.2452, "step": 560 },
    { "epoch": 3.65, "learning_rate": 7.052064027263786e-05, "loss": 0.2443, "step": 570 },
    { "epoch": 3.71, "learning_rate": 6.959833049300377e-05, "loss": 0.2465, "step": 580 },
    { "epoch": 3.78, "learning_rate": 6.866807276663106e-05, "loss": 0.2355, "step": 590 },
    { "epoch": 3.84, "learning_rate": 6.773024435212678e-05, "loss": 0.2413, "step": 600 },
    { "epoch": 3.9, "learning_rate": 6.678522557833024e-05, "loss": 0.2441, "step": 610 },
    { "epoch": 3.97, "learning_rate": 6.583339969007363e-05, "loss": 0.2379, "step": 620 },
    { "epoch": 4.03, "learning_rate": 6.487515269276016e-05, "loss": 0.237, "step": 630 },
    { "epoch": 4.1, "learning_rate": 6.391087319582264e-05, "loss": 0.2281, "step": 640 },
    { "epoch": 4.16, "learning_rate": 6.294095225512603e-05, "loss": 0.2361, "step": 650 },
    { "epoch": 4.22, "learning_rate": 6.19657832143779e-05, "loss": 0.2409, "step": 660 },
    { "epoch": 4.29, "learning_rate": 6.098576154561087e-05, "loss": 0.2293, "step": 670 },
    { "epoch": 4.35, "learning_rate": 6.0001284688802226e-05, "loss": 0.2301, "step": 680 },
    { "epoch": 4.42, "learning_rate": 5.90127518906953e-05, "loss": 0.2338, "step": 690 },
    { "epoch": 4.48, "learning_rate": 5.8020564042888015e-05, "loss": 0.2297, "step": 700 },
    { "epoch": 4.54, "learning_rate": 5.702512351925464e-05, "loss": 0.232, "step": 710 },
    { "epoch": 4.61, "learning_rate": 5.602683401276615e-05, "loss": 0.2407, "step": 720 },
    { "epoch": 4.67, "learning_rate": 5.502610037177586e-05, "loss": 0.2425, "step": 730 },
    { "epoch": 4.74, "learning_rate": 5.402332843583631e-05, "loss": 0.2372, "step": 740 },
    { "epoch": 4.8, "learning_rate": 5.3018924871114305e-05, "loss": 0.2279, "step": 750 },
    { "epoch": 4.86, "learning_rate": 5.201329700547076e-05, "loss": 0.229, "step": 760 },
    { "epoch": 4.93, "learning_rate": 5.100685266327202e-05, "loss": 0.2394, "step": 770 },
    { "epoch": 4.99, "learning_rate": 5e-05, "loss": 0.2266, "step": 780 },
    { "epoch": 5.06, "learning_rate": 4.899314733672799e-05, "loss": 0.2349, "step": 790 },
    { "epoch": 5.12, "learning_rate": 4.798670299452926e-05, "loss": 0.2282, "step": 800 },
    { "epoch": 5.18, "learning_rate": 4.6981075128885693e-05, "loss": 0.2296, "step": 810 },
    { "epoch": 5.25, "learning_rate": 4.597667156416371e-05, "loss": 0.2258, "step": 820 },
    { "epoch": 5.31, "learning_rate": 4.4973899628224154e-05, "loss": 0.2306, "step": 830 },
    { "epoch": 5.38, "learning_rate": 4.397316598723385e-05, "loss": 0.2317, "step": 840 },
    { "epoch": 5.44, "learning_rate": 4.297487648074538e-05, "loss": 0.2265, "step": 850 },
    { "epoch": 5.5, "learning_rate": 4.197943595711198e-05, "loss": 0.2298, "step": 860 },
    { "epoch": 5.57, "learning_rate": 4.0987248109304714e-05, "loss": 0.2258, "step": 870 },
    { "epoch": 5.63, "learning_rate": 3.9998715311197785e-05, "loss": 0.2275, "step": 880 },
    { "epoch": 5.7, "learning_rate": 3.901423845438916e-05, "loss": 0.2267, "step": 890 },
    { "epoch": 5.76, "learning_rate": 3.803421678562213e-05, "loss": 0.2196, "step": 900 },
    { "epoch": 5.82, "learning_rate": 3.705904774487396e-05, "loss": 0.223, "step": 910 },
    { "epoch": 5.89, "learning_rate": 3.608912680417737e-05, "loss": 0.2267, "step": 920 },
    { "epoch": 5.95, "learning_rate": 3.512484730723986e-05, "loss": 0.2274, "step": 930 },
    { "epoch": 6.02, "learning_rate": 3.4166600309926387e-05, "loss": 0.2261, "step": 940 },
    { "epoch": 6.08, "learning_rate": 3.3214774421669774e-05, "loss": 0.2239, "step": 950 },
    { "epoch": 6.14, "learning_rate": 3.226975564787322e-05, "loss": 0.221, "step": 960 },
    { "epoch": 6.21, "learning_rate": 3.133192723336895e-05, "loss": 0.2191, "step": 970 },
    { "epoch": 6.27, "learning_rate": 3.0401669506996256e-05, "loss": 0.2228, "step": 980 },
    { "epoch": 6.34, "learning_rate": 2.9479359727362173e-05, "loss": 0.2186, "step": 990 },
    { "epoch": 6.4, "learning_rate": 2.8565371929847284e-05, "loss": 0.2222, "step": 1000 },
    { "epoch": 6.46, "learning_rate": 2.7660076774918708e-05, "loss": 0.2333, "step": 1010 },
    { "epoch": 6.53, "learning_rate": 2.6763841397811573e-05, "loss": 0.2176, "step": 1020 },
    { "epoch": 6.59, "learning_rate": 2.587702925964034e-05, "loss": 0.2254, "step": 1030 },
    { "epoch": 6.66, "learning_rate": 2.500000000000001e-05, "loss": 0.2223, "step": 1040 },
    { "epoch": 6.72, "learning_rate": 2.4133109291117156e-05, "loss": 0.2209, "step": 1050 },
    { "epoch": 6.78, "learning_rate": 2.3276708693609943e-05, "loss": 0.2194, "step": 1060 },
    { "epoch": 6.85, "learning_rate": 2.243114551391542e-05, "loss": 0.2267, "step": 1070 },
    { "epoch": 6.91, "learning_rate": 2.1596762663442218e-05, "loss": 0.2235, "step": 1080 },
    { "epoch": 6.98, "learning_rate": 2.077389851950557e-05, "loss": 0.2257, "step": 1090 },
    { "epoch": 7.04, "learning_rate": 1.996288678810105e-05, "loss": 0.2208, "step": 1100 },
    { "epoch": 7.1, "learning_rate": 1.9164056368572846e-05, "loss": 0.2093, "step": 1110 },
    { "epoch": 7.17, "learning_rate": 1.837773122023114e-05, "loss": 0.2132, "step": 1120 },
    { "epoch": 7.23, "learning_rate": 1.760423023097307e-05, "loss": 0.2209, "step": 1130 },
    { "epoch": 7.3, "learning_rate": 1.684386708796025e-05, "loss": 0.2157, "step": 1140 },
    { "epoch": 7.36, "learning_rate": 1.6096950150405454e-05, "loss": 0.2164, "step": 1150 },
    { "epoch": 7.42, "learning_rate": 1.536378232452003e-05, "loss": 0.2203, "step": 1160 },
    { "epoch": 7.49, "learning_rate": 1.4644660940672627e-05, "loss": 0.2164, "step": 1170 },
    { "epoch": 7.55, "learning_rate": 1.3939877632809278e-05, "loss": 0.2177, "step": 1180 },
    { "epoch": 7.62, "learning_rate": 1.3249718220183583e-05, "loss": 0.2194, "step": 1190 },
    { "epoch": 7.68, "learning_rate": 1.257446259144494e-05, "loss": 0.2221, "step": 1200 },
    { "epoch": 7.74, "learning_rate": 1.1914384591132044e-05, "loss": 0.2202, "step": 1210 },
    { "epoch": 7.81, "learning_rate": 1.1269751908617277e-05, "loss": 0.2215, "step": 1220 },
    { "epoch": 7.87, "learning_rate": 1.0640825969547496e-05, "loss": 0.2231, "step": 1230 },
    { "epoch": 7.94, "learning_rate": 1.0027861829824952e-05, "loss": 0.2214, "step": 1240 },
    { "epoch": 8.0, "learning_rate": 9.431108072171346e-06, "loss": 0.214, "step": 1250 },
    { "epoch": 8.06, "learning_rate": 8.850806705317183e-06, "loss": 0.2187, "step": 1260 },
    { "epoch": 8.13, "learning_rate": 8.287193065856935e-06, "loss": 0.2183, "step": 1270 },
    { "epoch": 8.19, "learning_rate": 7.740495722810271e-06, "loss": 0.219, "step": 1280 },
    { "epoch": 8.26, "learning_rate": 7.21093638492763e-06, "loss": 0.2155, "step": 1290 },
    { "epoch": 8.32, "learning_rate": 6.698729810778065e-06, "loss": 0.2187, "step": 1300 },
    { "epoch": 8.38, "learning_rate": 6.204083721655607e-06, "loss": 0.2134, "step": 1310 },
    { "epoch": 8.45, "learning_rate": 5.727198717339511e-06, "loss": 0.2155, "step": 1320 },
    { "epoch": 8.51, "learning_rate": 5.2682681947426375e-06, "loss": 0.218, "step": 1330 },
    { "epoch": 8.58, "learning_rate": 4.827478269480895e-06, "loss": 0.2136, "step": 1340 },
    { "epoch": 8.64, "learning_rate": 4.405007700395497e-06, "loss": 0.2087, "step": 1350 },
    { "epoch": 8.7, "learning_rate": 4.001027817058789e-06, "loss": 0.214, "step": 1360 },
    { "epoch": 8.77, "learning_rate": 3.615702450292857e-06, "loss": 0.2213, "step": 1370 },
    { "epoch": 8.83, "learning_rate": 3.249187865729264e-06, "loss": 0.2177, "step": 1380 },
    { "epoch": 8.9, "learning_rate": 2.901632700436757e-06, "loss": 0.2193, "step": 1390 },
    { "epoch": 8.96, "learning_rate": 2.573177902642726e-06, "loss": 0.2128, "step": 1400 },
    { "epoch": 9.02, "learning_rate": 2.2639566745727205e-06, "loss": 0.2133, "step": 1410 },
    { "epoch": 9.09, "learning_rate": 1.974094418431388e-06, "loss": 0.2176, "step": 1420 },
    { "epoch": 9.15, "learning_rate": 1.70370868554659e-06, "loss": 0.2168, "step": 1430 },
    { "epoch": 9.22, "learning_rate": 1.4529091286973995e-06, "loss": 0.22, "step": 1440 },
    { "epoch": 9.28, "learning_rate": 1.2217974576453073e-06, "loss": 0.2134, "step": 1450 },
    { "epoch": 9.34, "learning_rate": 1.0104673978866164e-06, "loss": 0.2084, "step": 1460 },
    { "epoch": 9.41, "learning_rate": 8.190046526428242e-07, "loss": 0.2121, "step": 1470 },
    { "epoch": 9.47, "learning_rate": 6.474868681043578e-07, "loss": 0.2208, "step": 1480 },
    { "epoch": 9.54, "learning_rate": 4.959836019417963e-07, "loss": 0.217, "step": 1490 },
    { "epoch": 9.6, "learning_rate": 3.6455629509730136e-07, "loss": 0.2184, "step": 1500 },
    { "epoch": 9.66, "learning_rate": 2.532582468677214e-07, "loss": 0.2202, "step": 1510 },
    { "epoch": 9.73, "learning_rate": 1.6213459328950352e-07, "loss": 0.21, "step": 1520 },
    { "epoch": 9.79, "learning_rate": 9.12222888341252e-08, "loss": 0.2124, "step": 1530 },
    { "epoch": 9.86, "learning_rate": 4.055009142152067e-08, "loss": 0.2099, "step": 1540 },
    { "epoch": 9.92, "learning_rate": 1.0138550757493592e-08, "loss": 0.2104, "step": 1550 },
    { "epoch": 9.98, "learning_rate": 0.0, "loss": 0.2181, "step": 1560 },
    { "epoch": 9.98, "step": 1560, "total_flos": 1.5785361539281715e+18, "train_loss": 0.24185430575639774, "train_runtime": 4148.4624, "train_samples_per_second": 48.211, "train_steps_per_second": 0.376 }
  ],
  "max_steps": 1560,
  "num_train_epochs": 10,
  "total_flos": 1.5785361539281715e+18,
  "trial_name": null,
  "trial_params": null
}