| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 5.0, | |
| "eval_steps": 500, | |
| "global_step": 1565, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.016, | |
| "grad_norm": 10.365610882647738, | |
| "learning_rate": 1.0191082802547772e-06, | |
| "loss": 0.9552, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.48926758766174316, | |
| "step": 5, | |
| "valid_targets_mean": 1394.4, | |
| "valid_targets_min": 336 | |
| }, | |
| { | |
| "epoch": 0.032, | |
| "grad_norm": 8.318996862725456, | |
| "learning_rate": 2.2929936305732485e-06, | |
| "loss": 1.0418, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.4827696681022644, | |
| "step": 10, | |
| "valid_targets_mean": 1698.8, | |
| "valid_targets_min": 415 | |
| }, | |
| { | |
| "epoch": 0.048, | |
| "grad_norm": 4.800454513112877, | |
| "learning_rate": 3.56687898089172e-06, | |
| "loss": 0.9398, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.5183535814285278, | |
| "step": 15, | |
| "valid_targets_mean": 2403.8, | |
| "valid_targets_min": 265 | |
| }, | |
| { | |
| "epoch": 0.064, | |
| "grad_norm": 3.0418954263606626, | |
| "learning_rate": 4.840764331210192e-06, | |
| "loss": 0.8783, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3802979290485382, | |
| "step": 20, | |
| "valid_targets_mean": 1569.1, | |
| "valid_targets_min": 407 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 1.2890188508249805, | |
| "learning_rate": 6.114649681528663e-06, | |
| "loss": 0.797, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3261396884918213, | |
| "step": 25, | |
| "valid_targets_mean": 2250.6, | |
| "valid_targets_min": 385 | |
| }, | |
| { | |
| "epoch": 0.096, | |
| "grad_norm": 1.3841977072174543, | |
| "learning_rate": 7.388535031847134e-06, | |
| "loss": 0.8957, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3375973701477051, | |
| "step": 30, | |
| "valid_targets_mean": 1467.9, | |
| "valid_targets_min": 389 | |
| }, | |
| { | |
| "epoch": 0.112, | |
| "grad_norm": 1.0492083171705153, | |
| "learning_rate": 8.662420382165606e-06, | |
| "loss": 0.7641, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.38977062702178955, | |
| "step": 35, | |
| "valid_targets_mean": 1896.8, | |
| "valid_targets_min": 478 | |
| }, | |
| { | |
| "epoch": 0.128, | |
| "grad_norm": 0.9068002529064226, | |
| "learning_rate": 9.936305732484078e-06, | |
| "loss": 0.7237, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3373463749885559, | |
| "step": 40, | |
| "valid_targets_mean": 1387.3, | |
| "valid_targets_min": 315 | |
| }, | |
| { | |
| "epoch": 0.144, | |
| "grad_norm": 0.8392530938028915, | |
| "learning_rate": 1.1210191082802548e-05, | |
| "loss": 0.7991, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3486889898777008, | |
| "step": 45, | |
| "valid_targets_mean": 1371.1, | |
| "valid_targets_min": 312 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.7808328470983302, | |
| "learning_rate": 1.248407643312102e-05, | |
| "loss": 0.7843, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.44006726145744324, | |
| "step": 50, | |
| "valid_targets_mean": 1924.5, | |
| "valid_targets_min": 372 | |
| }, | |
| { | |
| "epoch": 0.176, | |
| "grad_norm": 0.6089838201868081, | |
| "learning_rate": 1.375796178343949e-05, | |
| "loss": 0.6712, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.40036633610725403, | |
| "step": 55, | |
| "valid_targets_mean": 2408.6, | |
| "valid_targets_min": 336 | |
| }, | |
| { | |
| "epoch": 0.192, | |
| "grad_norm": 0.6408164489801972, | |
| "learning_rate": 1.5031847133757964e-05, | |
| "loss": 0.7013, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.35468024015426636, | |
| "step": 60, | |
| "valid_targets_mean": 1822.6, | |
| "valid_targets_min": 429 | |
| }, | |
| { | |
| "epoch": 0.208, | |
| "grad_norm": 0.813402271214521, | |
| "learning_rate": 1.6305732484076436e-05, | |
| "loss": 0.7601, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.4636722505092621, | |
| "step": 65, | |
| "valid_targets_mean": 1724.1, | |
| "valid_targets_min": 310 | |
| }, | |
| { | |
| "epoch": 0.224, | |
| "grad_norm": 0.6292275100030212, | |
| "learning_rate": 1.7579617834394907e-05, | |
| "loss": 0.7395, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.47178909182548523, | |
| "step": 70, | |
| "valid_targets_mean": 2548.1, | |
| "valid_targets_min": 298 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.6034438748705967, | |
| "learning_rate": 1.8853503184713376e-05, | |
| "loss": 0.6413, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3467163145542145, | |
| "step": 75, | |
| "valid_targets_mean": 1841.9, | |
| "valid_targets_min": 358 | |
| }, | |
| { | |
| "epoch": 0.256, | |
| "grad_norm": 0.5649447902130157, | |
| "learning_rate": 2.0127388535031848e-05, | |
| "loss": 0.6269, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.30552658438682556, | |
| "step": 80, | |
| "valid_targets_mean": 1912.2, | |
| "valid_targets_min": 362 | |
| }, | |
| { | |
| "epoch": 0.272, | |
| "grad_norm": 0.9943142572003123, | |
| "learning_rate": 2.140127388535032e-05, | |
| "loss": 0.6759, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.42743825912475586, | |
| "step": 85, | |
| "valid_targets_mean": 1922.1, | |
| "valid_targets_min": 308 | |
| }, | |
| { | |
| "epoch": 0.288, | |
| "grad_norm": 0.6010175461160379, | |
| "learning_rate": 2.267515923566879e-05, | |
| "loss": 0.6583, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2835840582847595, | |
| "step": 90, | |
| "valid_targets_mean": 1609.9, | |
| "valid_targets_min": 306 | |
| }, | |
| { | |
| "epoch": 0.304, | |
| "grad_norm": 0.5226255807747925, | |
| "learning_rate": 2.3949044585987263e-05, | |
| "loss": 0.5936, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3410557806491852, | |
| "step": 95, | |
| "valid_targets_mean": 2872.8, | |
| "valid_targets_min": 314 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.6534412955866619, | |
| "learning_rate": 2.5222929936305732e-05, | |
| "loss": 0.6684, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2774350941181183, | |
| "step": 100, | |
| "valid_targets_mean": 1370.1, | |
| "valid_targets_min": 302 | |
| }, | |
| { | |
| "epoch": 0.336, | |
| "grad_norm": 0.6958389636053474, | |
| "learning_rate": 2.6496815286624204e-05, | |
| "loss": 0.6514, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3003643751144409, | |
| "step": 105, | |
| "valid_targets_mean": 1767.0, | |
| "valid_targets_min": 362 | |
| }, | |
| { | |
| "epoch": 0.352, | |
| "grad_norm": 0.6415066379401502, | |
| "learning_rate": 2.7770700636942676e-05, | |
| "loss": 0.6503, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2702697515487671, | |
| "step": 110, | |
| "valid_targets_mean": 1446.6, | |
| "valid_targets_min": 286 | |
| }, | |
| { | |
| "epoch": 0.368, | |
| "grad_norm": 0.7183491073051372, | |
| "learning_rate": 2.9044585987261148e-05, | |
| "loss": 0.6355, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.38555389642715454, | |
| "step": 115, | |
| "valid_targets_mean": 1719.9, | |
| "valid_targets_min": 301 | |
| }, | |
| { | |
| "epoch": 0.384, | |
| "grad_norm": 0.617270181197203, | |
| "learning_rate": 3.0318471337579623e-05, | |
| "loss": 0.6916, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3203192949295044, | |
| "step": 120, | |
| "valid_targets_mean": 1649.8, | |
| "valid_targets_min": 305 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.4934317301925454, | |
| "learning_rate": 3.1592356687898095e-05, | |
| "loss": 0.607, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.31386327743530273, | |
| "step": 125, | |
| "valid_targets_mean": 3283.2, | |
| "valid_targets_min": 418 | |
| }, | |
| { | |
| "epoch": 0.416, | |
| "grad_norm": 0.6091927165717088, | |
| "learning_rate": 3.286624203821656e-05, | |
| "loss": 0.6434, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.40177106857299805, | |
| "step": 130, | |
| "valid_targets_mean": 2241.1, | |
| "valid_targets_min": 374 | |
| }, | |
| { | |
| "epoch": 0.432, | |
| "grad_norm": 0.5685738745138306, | |
| "learning_rate": 3.414012738853504e-05, | |
| "loss": 0.5824, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.32374608516693115, | |
| "step": 135, | |
| "valid_targets_mean": 2751.9, | |
| "valid_targets_min": 430 | |
| }, | |
| { | |
| "epoch": 0.448, | |
| "grad_norm": 0.6592286023429951, | |
| "learning_rate": 3.541401273885351e-05, | |
| "loss": 0.6614, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.34401005506515503, | |
| "step": 140, | |
| "valid_targets_mean": 1769.7, | |
| "valid_targets_min": 374 | |
| }, | |
| { | |
| "epoch": 0.464, | |
| "grad_norm": 0.638305549341345, | |
| "learning_rate": 3.6687898089171976e-05, | |
| "loss": 0.6079, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23753316700458527, | |
| "step": 145, | |
| "valid_targets_mean": 1323.4, | |
| "valid_targets_min": 281 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.5560181985804709, | |
| "learning_rate": 3.796178343949045e-05, | |
| "loss": 0.6635, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.33407092094421387, | |
| "step": 150, | |
| "valid_targets_mean": 2445.2, | |
| "valid_targets_min": 434 | |
| }, | |
| { | |
| "epoch": 0.496, | |
| "grad_norm": 0.6348081601068993, | |
| "learning_rate": 3.923566878980892e-05, | |
| "loss": 0.6319, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3078237771987915, | |
| "step": 155, | |
| "valid_targets_mean": 1353.3, | |
| "valid_targets_min": 323 | |
| }, | |
| { | |
| "epoch": 0.512, | |
| "grad_norm": 0.6819327189519154, | |
| "learning_rate": 3.999980086219931e-05, | |
| "loss": 0.6082, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3177946209907532, | |
| "step": 160, | |
| "valid_targets_mean": 1957.7, | |
| "valid_targets_min": 572 | |
| }, | |
| { | |
| "epoch": 0.528, | |
| "grad_norm": 0.6069738297589734, | |
| "learning_rate": 3.9997560607483595e-05, | |
| "loss": 0.6253, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.38597410917282104, | |
| "step": 165, | |
| "valid_targets_mean": 2263.2, | |
| "valid_targets_min": 363 | |
| }, | |
| { | |
| "epoch": 0.544, | |
| "grad_norm": 0.6120073455552506, | |
| "learning_rate": 3.999283145555291e-05, | |
| "loss": 0.6591, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.30860719084739685, | |
| "step": 170, | |
| "valid_targets_mean": 1836.8, | |
| "valid_targets_min": 514 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.6230281006746647, | |
| "learning_rate": 3.998561399499772e-05, | |
| "loss": 0.6076, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3062671720981598, | |
| "step": 175, | |
| "valid_targets_mean": 1840.8, | |
| "valid_targets_min": 320 | |
| }, | |
| { | |
| "epoch": 0.576, | |
| "grad_norm": 0.5207076968783577, | |
| "learning_rate": 3.997590912410345e-05, | |
| "loss": 0.6107, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27832305431365967, | |
| "step": 180, | |
| "valid_targets_mean": 2283.2, | |
| "valid_targets_min": 356 | |
| }, | |
| { | |
| "epoch": 0.592, | |
| "grad_norm": 0.678777597215989, | |
| "learning_rate": 3.996371805073874e-05, | |
| "loss": 0.6024, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3409014940261841, | |
| "step": 185, | |
| "valid_targets_mean": 1853.5, | |
| "valid_targets_min": 403 | |
| }, | |
| { | |
| "epoch": 0.608, | |
| "grad_norm": 0.7245769852975813, | |
| "learning_rate": 3.994904229220507e-05, | |
| "loss": 0.6674, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3859507441520691, | |
| "step": 190, | |
| "valid_targets_mean": 1678.1, | |
| "valid_targets_min": 390 | |
| }, | |
| { | |
| "epoch": 0.624, | |
| "grad_norm": 0.6523867436592213, | |
| "learning_rate": 3.9931883675047966e-05, | |
| "loss": 0.6027, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3305048942565918, | |
| "step": 195, | |
| "valid_targets_mean": 1755.2, | |
| "valid_targets_min": 406 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.4473481889133654, | |
| "learning_rate": 3.991224433482961e-05, | |
| "loss": 0.5699, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24788443744182587, | |
| "step": 200, | |
| "valid_targets_mean": 2629.7, | |
| "valid_targets_min": 322 | |
| }, | |
| { | |
| "epoch": 0.656, | |
| "grad_norm": 0.6718409949102206, | |
| "learning_rate": 3.98901267158631e-05, | |
| "loss": 0.6116, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.30983588099479675, | |
| "step": 205, | |
| "valid_targets_mean": 1294.2, | |
| "valid_targets_min": 414 | |
| }, | |
| { | |
| "epoch": 0.672, | |
| "grad_norm": 0.7075567613287291, | |
| "learning_rate": 3.98655335709082e-05, | |
| "loss": 0.6441, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.31481683254241943, | |
| "step": 210, | |
| "valid_targets_mean": 1779.5, | |
| "valid_targets_min": 417 | |
| }, | |
| { | |
| "epoch": 0.688, | |
| "grad_norm": 0.5915980159411647, | |
| "learning_rate": 3.9838467960828745e-05, | |
| "loss": 0.5953, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3381510376930237, | |
| "step": 215, | |
| "valid_targets_mean": 2019.5, | |
| "valid_targets_min": 344 | |
| }, | |
| { | |
| "epoch": 0.704, | |
| "grad_norm": 0.6846161715573829, | |
| "learning_rate": 3.9808933254211665e-05, | |
| "loss": 0.5793, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27559298276901245, | |
| "step": 220, | |
| "valid_targets_mean": 1383.5, | |
| "valid_targets_min": 376 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.6583499157954867, | |
| "learning_rate": 3.977693312694778e-05, | |
| "loss": 0.6357, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.37204593420028687, | |
| "step": 225, | |
| "valid_targets_mean": 1624.6, | |
| "valid_targets_min": 392 | |
| }, | |
| { | |
| "epoch": 0.736, | |
| "grad_norm": 0.6711716119703148, | |
| "learning_rate": 3.974247156177423e-05, | |
| "loss": 0.579, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2619155943393707, | |
| "step": 230, | |
| "valid_targets_mean": 1260.9, | |
| "valid_targets_min": 308 | |
| }, | |
| { | |
| "epoch": 0.752, | |
| "grad_norm": 0.5875891221127311, | |
| "learning_rate": 3.970555284777883e-05, | |
| "loss": 0.6093, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29293981194496155, | |
| "step": 235, | |
| "valid_targets_mean": 2052.9, | |
| "valid_targets_min": 281 | |
| }, | |
| { | |
| "epoch": 0.768, | |
| "grad_norm": 0.5427240385670294, | |
| "learning_rate": 3.9666181579866244e-05, | |
| "loss": 0.642, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27305272221565247, | |
| "step": 240, | |
| "valid_targets_mean": 1545.4, | |
| "valid_targets_min": 329 | |
| }, | |
| { | |
| "epoch": 0.784, | |
| "grad_norm": 0.6025668142195261, | |
| "learning_rate": 3.962436265818611e-05, | |
| "loss": 0.5645, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29572200775146484, | |
| "step": 245, | |
| "valid_targets_mean": 1501.6, | |
| "valid_targets_min": 315 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.6360380564834982, | |
| "learning_rate": 3.9580101287523105e-05, | |
| "loss": 0.6107, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.33826425671577454, | |
| "step": 250, | |
| "valid_targets_mean": 1896.1, | |
| "valid_targets_min": 382 | |
| }, | |
| { | |
| "epoch": 0.816, | |
| "grad_norm": 0.5962705557203846, | |
| "learning_rate": 3.953340297664928e-05, | |
| "loss": 0.5726, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24385955929756165, | |
| "step": 255, | |
| "valid_targets_mean": 2214.8, | |
| "valid_targets_min": 552 | |
| }, | |
| { | |
| "epoch": 0.832, | |
| "grad_norm": 0.6343720044053153, | |
| "learning_rate": 3.948427353763829e-05, | |
| "loss": 0.5892, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28834953904151917, | |
| "step": 260, | |
| "valid_targets_mean": 1582.5, | |
| "valid_targets_min": 468 | |
| }, | |
| { | |
| "epoch": 0.848, | |
| "grad_norm": 0.6206932511216893, | |
| "learning_rate": 3.943271908514216e-05, | |
| "loss": 0.5588, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.35036319494247437, | |
| "step": 265, | |
| "valid_targets_mean": 1686.8, | |
| "valid_targets_min": 401 | |
| }, | |
| { | |
| "epoch": 0.864, | |
| "grad_norm": 0.6189312039660988, | |
| "learning_rate": 3.937874603563015e-05, | |
| "loss": 0.6116, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3610442876815796, | |
| "step": 270, | |
| "valid_targets_mean": 1867.8, | |
| "valid_targets_min": 324 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.5609968986370193, | |
| "learning_rate": 3.932236110659023e-05, | |
| "loss": 0.5898, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24128054082393646, | |
| "step": 275, | |
| "valid_targets_mean": 1498.9, | |
| "valid_targets_min": 304 | |
| }, | |
| { | |
| "epoch": 0.896, | |
| "grad_norm": 0.6550290228777313, | |
| "learning_rate": 3.9263571315692976e-05, | |
| "loss": 0.5501, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27276456356048584, | |
| "step": 280, | |
| "valid_targets_mean": 1734.2, | |
| "valid_targets_min": 282 | |
| }, | |
| { | |
| "epoch": 0.912, | |
| "grad_norm": 0.5723215889355538, | |
| "learning_rate": 3.920238397991818e-05, | |
| "loss": 0.6237, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3226080536842346, | |
| "step": 285, | |
| "valid_targets_mean": 1889.1, | |
| "valid_targets_min": 317 | |
| }, | |
| { | |
| "epoch": 0.928, | |
| "grad_norm": 0.6228336603633774, | |
| "learning_rate": 3.913880671464418e-05, | |
| "loss": 0.6245, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.41412851214408875, | |
| "step": 290, | |
| "valid_targets_mean": 2260.9, | |
| "valid_targets_min": 507 | |
| }, | |
| { | |
| "epoch": 0.944, | |
| "grad_norm": 0.5413361031700534, | |
| "learning_rate": 3.907284743270001e-05, | |
| "loss": 0.5727, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.32943207025527954, | |
| "step": 295, | |
| "valid_targets_mean": 2217.2, | |
| "valid_targets_min": 373 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.5667098555365755, | |
| "learning_rate": 3.900451434338062e-05, | |
| "loss": 0.638, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2977871894836426, | |
| "step": 300, | |
| "valid_targets_mean": 1661.4, | |
| "valid_targets_min": 582 | |
| }, | |
| { | |
| "epoch": 0.976, | |
| "grad_norm": 0.44595818905414, | |
| "learning_rate": 3.893381595142511e-05, | |
| "loss": 0.5433, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24412350356578827, | |
| "step": 305, | |
| "valid_targets_mean": 2918.8, | |
| "valid_targets_min": 522 | |
| }, | |
| { | |
| "epoch": 0.992, | |
| "grad_norm": 0.5318234961379363, | |
| "learning_rate": 3.886076105595825e-05, | |
| "loss": 0.6035, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28200763463974, | |
| "step": 310, | |
| "valid_targets_mean": 2191.4, | |
| "valid_targets_min": 313 | |
| }, | |
| { | |
| "epoch": 1.0064, | |
| "grad_norm": 0.5666587480342058, | |
| "learning_rate": 3.878535874939532e-05, | |
| "loss": 0.5549, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22651788592338562, | |
| "step": 315, | |
| "valid_targets_mean": 1769.2, | |
| "valid_targets_min": 343 | |
| }, | |
| { | |
| "epoch": 1.0224, | |
| "grad_norm": 0.5741499793900294, | |
| "learning_rate": 3.870761841631051e-05, | |
| "loss": 0.6055, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2124353051185608, | |
| "step": 320, | |
| "valid_targets_mean": 1632.1, | |
| "valid_targets_min": 353 | |
| }, | |
| { | |
| "epoch": 1.0384, | |
| "grad_norm": 0.5205025073484473, | |
| "learning_rate": 3.862754973226887e-05, | |
| "loss": 0.583, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2766958475112915, | |
| "step": 325, | |
| "valid_targets_mean": 2115.7, | |
| "valid_targets_min": 441 | |
| }, | |
| { | |
| "epoch": 1.0544, | |
| "grad_norm": 0.5977766131152823, | |
| "learning_rate": 3.85451626626221e-05, | |
| "loss": 0.567, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2835386395454407, | |
| "step": 330, | |
| "valid_targets_mean": 2030.8, | |
| "valid_targets_min": 344 | |
| }, | |
| { | |
| "epoch": 1.0704, | |
| "grad_norm": 0.4901581966995571, | |
| "learning_rate": 3.846046746126827e-05, | |
| "loss": 0.5514, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24974173307418823, | |
| "step": 335, | |
| "valid_targets_mean": 2468.9, | |
| "valid_targets_min": 334 | |
| }, | |
| { | |
| "epoch": 1.0864, | |
| "grad_norm": 0.7209751282300725, | |
| "learning_rate": 3.837347466937562e-05, | |
| "loss": 0.5681, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29909491539001465, | |
| "step": 340, | |
| "valid_targets_mean": 1571.0, | |
| "valid_targets_min": 298 | |
| }, | |
| { | |
| "epoch": 1.1024, | |
| "grad_norm": 0.5689470714805076, | |
| "learning_rate": 3.828419511407062e-05, | |
| "loss": 0.5406, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2614998519420624, | |
| "step": 345, | |
| "valid_targets_mean": 2440.4, | |
| "valid_targets_min": 487 | |
| }, | |
| { | |
| "epoch": 1.1184, | |
| "grad_norm": 0.6678852337993968, | |
| "learning_rate": 3.819263990709037e-05, | |
| "loss": 0.5923, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3825380206108093, | |
| "step": 350, | |
| "valid_targets_mean": 1729.8, | |
| "valid_targets_min": 469 | |
| }, | |
| { | |
| "epoch": 1.1344, | |
| "grad_norm": 0.6305434492392159, | |
| "learning_rate": 3.809882044339971e-05, | |
| "loss": 0.5795, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3732347786426544, | |
| "step": 355, | |
| "valid_targets_mean": 1940.9, | |
| "valid_targets_min": 412 | |
| }, | |
| { | |
| "epoch": 1.1504, | |
| "grad_norm": 0.46443915237397776, | |
| "learning_rate": 3.800274839977293e-05, | |
| "loss": 0.548, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27172112464904785, | |
| "step": 360, | |
| "valid_targets_mean": 3067.0, | |
| "valid_targets_min": 296 | |
| }, | |
| { | |
| "epoch": 1.1663999999999999, | |
| "grad_norm": 0.6458112317408627, | |
| "learning_rate": 3.790443573334055e-05, | |
| "loss": 0.5339, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2920452952384949, | |
| "step": 365, | |
| "valid_targets_mean": 1547.1, | |
| "valid_targets_min": 374 | |
| }, | |
| { | |
| "epoch": 1.1824, | |
| "grad_norm": 0.6423557714677564, | |
| "learning_rate": 3.780389468010106e-05, | |
| "loss": 0.5221, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.31279245018959045, | |
| "step": 370, | |
| "valid_targets_mean": 1994.4, | |
| "valid_targets_min": 389 | |
| }, | |
| { | |
| "epoch": 1.1984, | |
| "grad_norm": 0.581575117007808, | |
| "learning_rate": 3.7701137753398075e-05, | |
| "loss": 0.5543, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28286123275756836, | |
| "step": 375, | |
| "valid_targets_mean": 2159.1, | |
| "valid_targets_min": 863 | |
| }, | |
| { | |
| "epoch": 1.2144, | |
| "grad_norm": 0.7659286309991649, | |
| "learning_rate": 3.759617774236292e-05, | |
| "loss": 0.5601, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28801482915878296, | |
| "step": 380, | |
| "valid_targets_mean": 1025.9, | |
| "valid_targets_min": 384 | |
| }, | |
| { | |
| "epoch": 1.2304, | |
| "grad_norm": 0.6009120577635161, | |
| "learning_rate": 3.748902771032288e-05, | |
| "loss": 0.5497, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3129914402961731, | |
| "step": 385, | |
| "valid_targets_mean": 1648.7, | |
| "valid_targets_min": 457 | |
| }, | |
| { | |
| "epoch": 1.2464, | |
| "grad_norm": 0.5107267663004865, | |
| "learning_rate": 3.737970099317535e-05, | |
| "loss": 0.5535, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29217737913131714, | |
| "step": 390, | |
| "valid_targets_mean": 2424.2, | |
| "valid_targets_min": 527 | |
| }, | |
| { | |
| "epoch": 1.2624, | |
| "grad_norm": 0.6748563323937695, | |
| "learning_rate": 3.726821119772803e-05, | |
| "loss": 0.572, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29269343614578247, | |
| "step": 395, | |
| "valid_targets_mean": 1255.2, | |
| "valid_targets_min": 319 | |
| }, | |
| { | |
| "epoch": 1.2784, | |
| "grad_norm": 0.6150539685960873, | |
| "learning_rate": 3.7154572200005446e-05, | |
| "loss": 0.5855, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2708214521408081, | |
| "step": 400, | |
| "valid_targets_mean": 1865.2, | |
| "valid_targets_min": 367 | |
| }, | |
| { | |
| "epoch": 1.2944, | |
| "grad_norm": 0.6942542907714536, | |
| "learning_rate": 3.703879814352193e-05, | |
| "loss": 0.5664, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23922036588191986, | |
| "step": 405, | |
| "valid_targets_mean": 1504.2, | |
| "valid_targets_min": 519 | |
| }, | |
| { | |
| "epoch": 1.3104, | |
| "grad_norm": 0.6473740553177609, | |
| "learning_rate": 3.6920903437521305e-05, | |
| "loss": 0.5471, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2532743215560913, | |
| "step": 410, | |
| "valid_targets_mean": 1413.0, | |
| "valid_targets_min": 357 | |
| }, | |
| { | |
| "epoch": 1.3264, | |
| "grad_norm": 0.5600748415504437, | |
| "learning_rate": 3.680090275518352e-05, | |
| "loss": 0.5389, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.19406437873840332, | |
| "step": 415, | |
| "valid_targets_mean": 1652.2, | |
| "valid_targets_min": 330 | |
| }, | |
| { | |
| "epoch": 1.3424, | |
| "grad_norm": 0.5569084301061894, | |
| "learning_rate": 3.667881103179844e-05, | |
| "loss": 0.5724, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3152419924736023, | |
| "step": 420, | |
| "valid_targets_mean": 2236.6, | |
| "valid_targets_min": 609 | |
| }, | |
| { | |
| "epoch": 1.3584, | |
| "grad_norm": 0.7245711012662981, | |
| "learning_rate": 3.655464346290697e-05, | |
| "loss": 0.5519, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3345484435558319, | |
| "step": 425, | |
| "valid_targets_mean": 1572.2, | |
| "valid_targets_min": 360 | |
| }, | |
| { | |
| "epoch": 1.3744, | |
| "grad_norm": 0.5399589780136985, | |
| "learning_rate": 3.642841550240983e-05, | |
| "loss": 0.552, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2587853670120239, | |
| "step": 430, | |
| "valid_targets_mean": 1710.2, | |
| "valid_targets_min": 330 | |
| }, | |
| { | |
| "epoch": 1.3904, | |
| "grad_norm": 0.7073654607596489, | |
| "learning_rate": 3.630014286064419e-05, | |
| "loss": 0.5792, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.36871421337127686, | |
| "step": 435, | |
| "valid_targets_mean": 1905.2, | |
| "valid_targets_min": 414 | |
| }, | |
| { | |
| "epoch": 1.4064, | |
| "grad_norm": 0.6165859182173671, | |
| "learning_rate": 3.6169841502428285e-05, | |
| "loss": 0.5774, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21847328543663025, | |
| "step": 440, | |
| "valid_targets_mean": 1438.1, | |
| "valid_targets_min": 278 | |
| }, | |
| { | |
| "epoch": 1.4224, | |
| "grad_norm": 0.5621114473337674, | |
| "learning_rate": 3.603752764507454e-05, | |
| "loss": 0.5577, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29782426357269287, | |
| "step": 445, | |
| "valid_targets_mean": 2356.9, | |
| "valid_targets_min": 356 | |
| }, | |
| { | |
| "epoch": 1.4384000000000001, | |
| "grad_norm": 0.634400597909609, | |
| "learning_rate": 3.5903217756371066e-05, | |
| "loss": 0.6153, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3603796660900116, | |
| "step": 450, | |
| "valid_targets_mean": 1890.1, | |
| "valid_targets_min": 318 | |
| }, | |
| { | |
| "epoch": 1.4544000000000001, | |
| "grad_norm": 0.5762772475053114, | |
| "learning_rate": 3.576692855253213e-05, | |
| "loss": 0.5819, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2907714247703552, | |
| "step": 455, | |
| "valid_targets_mean": 2168.4, | |
| "valid_targets_min": 496 | |
| }, | |
| { | |
| "epoch": 1.4704, | |
| "grad_norm": 0.6042058905595971, | |
| "learning_rate": 3.562867699611764e-05, | |
| "loss": 0.5701, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2983001172542572, | |
| "step": 460, | |
| "valid_targets_mean": 1742.3, | |
| "valid_targets_min": 497 | |
| }, | |
| { | |
| "epoch": 1.4864, | |
| "grad_norm": 0.6232223003277538, | |
| "learning_rate": 3.5488480293922e-05, | |
| "loss": 0.5515, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.34741562604904175, | |
| "step": 465, | |
| "valid_targets_mean": 1680.1, | |
| "valid_targets_min": 410 | |
| }, | |
| { | |
| "epoch": 1.5024, | |
| "grad_norm": 0.5191562499661858, | |
| "learning_rate": 3.5346355894832515e-05, | |
| "loss": 0.5794, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2747904062271118, | |
| "step": 470, | |
| "valid_targets_mean": 2039.3, | |
| "valid_targets_min": 456 | |
| }, | |
| { | |
| "epoch": 1.5184, | |
| "grad_norm": 0.6315483521632249, | |
| "learning_rate": 3.520232148765774e-05, | |
| "loss": 0.5348, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.19648823142051697, | |
| "step": 475, | |
| "valid_targets_mean": 1314.0, | |
| "valid_targets_min": 365 | |
| }, | |
| { | |
| "epoch": 1.5344, | |
| "grad_norm": 0.5866989398606268, | |
| "learning_rate": 3.505639499892591e-05, | |
| "loss": 0.5461, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21248644590377808, | |
| "step": 480, | |
| "valid_targets_mean": 1864.5, | |
| "valid_targets_min": 512 | |
| }, | |
| { | |
| "epoch": 1.5504, | |
| "grad_norm": 0.5111549559639873, | |
| "learning_rate": 3.490859459065382e-05, | |
| "loss": 0.5355, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3530312478542328, | |
| "step": 485, | |
| "valid_targets_mean": 2767.8, | |
| "valid_targets_min": 420 | |
| }, | |
| { | |
| "epoch": 1.5664, | |
| "grad_norm": 0.6708193947913946, | |
| "learning_rate": 3.475893865808633e-05, | |
| "loss": 0.5729, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28748923540115356, | |
| "step": 490, | |
| "valid_targets_mean": 1501.6, | |
| "valid_targets_min": 320 | |
| }, | |
| { | |
| "epoch": 1.5824, | |
| "grad_norm": 0.4011057903489599, | |
| "learning_rate": 3.4607445827406984e-05, | |
| "loss": 0.5517, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.17250488698482513, | |
| "step": 495, | |
| "valid_targets_mean": 2437.2, | |
| "valid_targets_min": 330 | |
| }, | |
| { | |
| "epoch": 1.5984, | |
| "grad_norm": 0.5514130641780497, | |
| "learning_rate": 3.445413495341971e-05, | |
| "loss": 0.5239, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20387420058250427, | |
| "step": 500, | |
| "valid_targets_mean": 1304.2, | |
| "valid_targets_min": 335 | |
| }, | |
| { | |
| "epoch": 1.6143999999999998, | |
| "grad_norm": 0.5305060649630657, | |
| "learning_rate": 3.429902511720216e-05, | |
| "loss": 0.505, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.19152876734733582, | |
| "step": 505, | |
| "valid_targets_mean": 1746.8, | |
| "valid_targets_min": 432 | |
| }, | |
| { | |
| "epoch": 1.6303999999999998, | |
| "grad_norm": 0.5860156139840498, | |
| "learning_rate": 3.4142135623730954e-05, | |
| "loss": 0.5301, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2521340250968933, | |
| "step": 510, | |
| "valid_targets_mean": 1201.4, | |
| "valid_targets_min": 380 | |
| }, | |
| { | |
| "epoch": 1.6463999999999999, | |
| "grad_norm": 0.6046799452495678, | |
| "learning_rate": 3.398348599947888e-05, | |
| "loss": 0.567, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2523391842842102, | |
| "step": 515, | |
| "valid_targets_mean": 1271.8, | |
| "valid_targets_min": 325 | |
| }, | |
| { | |
| "epoch": 1.6623999999999999, | |
| "grad_norm": 0.5962853068653082, | |
| "learning_rate": 3.3823095989984697e-05, | |
| "loss": 0.5464, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3069849908351898, | |
| "step": 520, | |
| "valid_targets_mean": 1735.7, | |
| "valid_targets_min": 370 | |
| }, | |
| { | |
| "epoch": 1.6784, | |
| "grad_norm": 0.5642209848101286, | |
| "learning_rate": 3.366098555739557e-05, | |
| "loss": 0.5693, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2982049584388733, | |
| "step": 525, | |
| "valid_targets_mean": 2335.8, | |
| "valid_targets_min": 324 | |
| }, | |
| { | |
| "epoch": 1.6944, | |
| "grad_norm": 0.5454108288696442, | |
| "learning_rate": 3.349717487798261e-05, | |
| "loss": 0.5608, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2737429141998291, | |
| "step": 530, | |
| "valid_targets_mean": 1879.2, | |
| "valid_targets_min": 404 | |
| }, | |
| { | |
| "epoch": 1.7104, | |
| "grad_norm": 0.7555257758629741, | |
| "learning_rate": 3.3331684339629706e-05, | |
| "loss": 0.586, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.35174569487571716, | |
| "step": 535, | |
| "valid_targets_mean": 1304.4, | |
| "valid_targets_min": 321 | |
| }, | |
| { | |
| "epoch": 1.7264, | |
| "grad_norm": 0.7307148698923294, | |
| "learning_rate": 3.3164534539296056e-05, | |
| "loss": 0.5806, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.31116968393325806, | |
| "step": 540, | |
| "valid_targets_mean": 1233.2, | |
| "valid_targets_min": 345 | |
| }, | |
| { | |
| "epoch": 1.7424, | |
| "grad_norm": 0.6159389066521203, | |
| "learning_rate": 3.299574628045269e-05, | |
| "loss": 0.5685, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2987688183784485, | |
| "step": 545, | |
| "valid_targets_mean": 1732.8, | |
| "valid_targets_min": 327 | |
| }, | |
| { | |
| "epoch": 1.7584, | |
| "grad_norm": 0.5271196547986835, | |
| "learning_rate": 3.282534057049322e-05, | |
| "loss": 0.5582, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.16863203048706055, | |
| "step": 550, | |
| "valid_targets_mean": 1493.8, | |
| "valid_targets_min": 319 | |
| }, | |
| { | |
| "epoch": 1.7744, | |
| "grad_norm": 0.6087837858647173, | |
| "learning_rate": 3.265333861811933e-05, | |
| "loss": 0.5683, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.34451255202293396, | |
| "step": 555, | |
| "valid_targets_mean": 2183.1, | |
| "valid_targets_min": 352 | |
| }, | |
| { | |
| "epoch": 1.7904, | |
| "grad_norm": 0.5228238699536046, | |
| "learning_rate": 3.2479761830701075e-05, | |
| "loss": 0.5628, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26007527112960815, | |
| "step": 560, | |
| "valid_targets_mean": 1822.6, | |
| "valid_targets_min": 501 | |
| }, | |
| { | |
| "epoch": 1.8064, | |
| "grad_norm": 0.7066412180100229, | |
| "learning_rate": 3.230463181161254e-05, | |
| "loss": 0.5917, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2669428586959839, | |
| "step": 565, | |
| "valid_targets_mean": 1130.3, | |
| "valid_targets_min": 326 | |
| }, | |
| { | |
| "epoch": 1.8224, | |
| "grad_norm": 0.565546916181129, | |
| "learning_rate": 3.212797035754311e-05, | |
| "loss": 0.5397, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.19365203380584717, | |
| "step": 570, | |
| "valid_targets_mean": 1359.0, | |
| "valid_targets_min": 311 | |
| }, | |
| { | |
| "epoch": 1.8384, | |
| "grad_norm": 0.5986440547301436, | |
| "learning_rate": 3.194979945578461e-05, | |
| "loss": 0.5721, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2979618012905121, | |
| "step": 575, | |
| "valid_targets_mean": 1635.5, | |
| "valid_targets_min": 337 | |
| }, | |
| { | |
| "epoch": 1.8544, | |
| "grad_norm": 0.526574384507719, | |
| "learning_rate": 3.177014128149479e-05, | |
| "loss": 0.5409, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2373972088098526, | |
| "step": 580, | |
| "valid_targets_mean": 1976.0, | |
| "valid_targets_min": 320 | |
| }, | |
| { | |
| "epoch": 1.8704, | |
| "grad_norm": 0.5407978134499969, | |
| "learning_rate": 3.158901819493742e-05, | |
| "loss": 0.5682, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3032454252243042, | |
| "step": 585, | |
| "valid_targets_mean": 2200.4, | |
| "valid_targets_min": 438 | |
| }, | |
| { | |
| "epoch": 1.8864, | |
| "grad_norm": 0.5262593226825127, | |
| "learning_rate": 3.1406452738699284e-05, | |
| "loss": 0.5468, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2405659258365631, | |
| "step": 590, | |
| "valid_targets_mean": 1451.5, | |
| "valid_targets_min": 337 | |
| }, | |
| { | |
| "epoch": 1.9024, | |
| "grad_norm": 0.7074003472143059, | |
| "learning_rate": 3.122246763488457e-05, | |
| "loss": 0.5666, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2271709144115448, | |
| "step": 595, | |
| "valid_targets_mean": 976.3, | |
| "valid_targets_min": 364 | |
| }, | |
| { | |
| "epoch": 1.9184, | |
| "grad_norm": 0.6418230662980694, | |
| "learning_rate": 3.103708578228686e-05, | |
| "loss": 0.5487, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3241179287433624, | |
| "step": 600, | |
| "valid_targets_mean": 1757.9, | |
| "valid_targets_min": 309 | |
| }, | |
| { | |
| "epoch": 1.9344000000000001, | |
| "grad_norm": 0.6599064657549003, | |
| "learning_rate": 3.085033025353915e-05, | |
| "loss": 0.5561, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29733017086982727, | |
| "step": 605, | |
| "valid_targets_mean": 1896.1, | |
| "valid_targets_min": 306 | |
| }, | |
| { | |
| "epoch": 1.9504000000000001, | |
| "grad_norm": 0.5188859669086576, | |
| "learning_rate": 3.066222429224221e-05, | |
| "loss": 0.541, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.25629156827926636, | |
| "step": 610, | |
| "valid_targets_mean": 1635.2, | |
| "valid_targets_min": 435 | |
| }, | |
| { | |
| "epoch": 1.9664000000000001, | |
| "grad_norm": 0.7113751595997874, | |
| "learning_rate": 3.047279131007173e-05, | |
| "loss": 0.5601, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3459906578063965, | |
| "step": 615, | |
| "valid_targets_mean": 1404.6, | |
| "valid_targets_min": 352 | |
| }, | |
| { | |
| "epoch": 1.9824000000000002, | |
| "grad_norm": 0.6385871829438637, | |
| "learning_rate": 3.0282054883864434e-05, | |
| "loss": 0.5602, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.32659295201301575, | |
| "step": 620, | |
| "valid_targets_mean": 1352.7, | |
| "valid_targets_min": 344 | |
| }, | |
| { | |
| "epoch": 1.9984, | |
| "grad_norm": 0.5223112786838583, | |
| "learning_rate": 3.009003875268379e-05, | |
| "loss": 0.5869, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2662052810192108, | |
| "step": 625, | |
| "valid_targets_mean": 1918.2, | |
| "valid_targets_min": 318 | |
| }, | |
| { | |
| "epoch": 2.0128, | |
| "grad_norm": 0.5839740119468653, | |
| "learning_rate": 2.9896766814865355e-05, | |
| "loss": 0.5383, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27003538608551025, | |
| "step": 630, | |
| "valid_targets_mean": 1757.9, | |
| "valid_targets_min": 352 | |
| }, | |
| { | |
| "epoch": 2.0288, | |
| "grad_norm": 0.6313940695586244, | |
| "learning_rate": 2.970226312504246e-05, | |
| "loss": 0.5301, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29057958722114563, | |
| "step": 635, | |
| "valid_targets_mean": 1927.1, | |
| "valid_targets_min": 329 | |
| }, | |
| { | |
| "epoch": 2.0448, | |
| "grad_norm": 0.6274558609840433, | |
| "learning_rate": 2.9506551891152334e-05, | |
| "loss": 0.5469, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27966347336769104, | |
| "step": 640, | |
| "valid_targets_mean": 1441.1, | |
| "valid_targets_min": 300 | |
| }, | |
| { | |
| "epoch": 2.0608, | |
| "grad_norm": 0.5810705694916855, | |
| "learning_rate": 2.930965747142319e-05, | |
| "loss": 0.5077, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2656835615634918, | |
| "step": 645, | |
| "valid_targets_mean": 1744.5, | |
| "valid_targets_min": 358 | |
| }, | |
| { | |
| "epoch": 2.0768, | |
| "grad_norm": 0.6013853648532194, | |
| "learning_rate": 2.9111604371342593e-05, | |
| "loss": 0.5257, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26053082942962646, | |
| "step": 650, | |
| "valid_targets_mean": 1872.3, | |
| "valid_targets_min": 419 | |
| }, | |
| { | |
| "epoch": 2.0928, | |
| "grad_norm": 0.553453566945297, | |
| "learning_rate": 2.891241724060752e-05, | |
| "loss": 0.5629, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2635321319103241, | |
| "step": 655, | |
| "valid_targets_mean": 1575.1, | |
| "valid_targets_min": 485 | |
| }, | |
| { | |
| "epoch": 2.1088, | |
| "grad_norm": 0.5798900170494127, | |
| "learning_rate": 2.8712120870056455e-05, | |
| "loss": 0.5029, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3110058903694153, | |
| "step": 660, | |
| "valid_targets_mean": 1943.1, | |
| "valid_targets_min": 276 | |
| }, | |
| { | |
| "epoch": 2.1248, | |
| "grad_norm": 0.7208783260886057, | |
| "learning_rate": 2.851074018858389e-05, | |
| "loss": 0.5457, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.30960655212402344, | |
| "step": 665, | |
| "valid_targets_mean": 1142.4, | |
| "valid_targets_min": 281 | |
| }, | |
| { | |
| "epoch": 2.1408, | |
| "grad_norm": 0.6800765283733173, | |
| "learning_rate": 2.8308300260037734e-05, | |
| "loss": 0.4996, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2656320035457611, | |
| "step": 670, | |
| "valid_targets_mean": 1283.1, | |
| "valid_targets_min": 479 | |
| }, | |
| { | |
| "epoch": 2.1568, | |
| "grad_norm": 0.5165999502544153, | |
| "learning_rate": 2.8104826280099796e-05, | |
| "loss": 0.5311, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27948087453842163, | |
| "step": 675, | |
| "valid_targets_mean": 2333.4, | |
| "valid_targets_min": 498 | |
| }, | |
| { | |
| "epoch": 2.1728, | |
| "grad_norm": 0.6334171848552108, | |
| "learning_rate": 2.7900343573150003e-05, | |
| "loss": 0.5516, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29954349994659424, | |
| "step": 680, | |
| "valid_targets_mean": 1819.2, | |
| "valid_targets_min": 376 | |
| }, | |
| { | |
| "epoch": 2.1888, | |
| "grad_norm": 0.721155834054541, | |
| "learning_rate": 2.7694877589114442e-05, | |
| "loss": 0.5294, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3683796525001526, | |
| "step": 685, | |
| "valid_targets_mean": 1527.6, | |
| "valid_targets_min": 364 | |
| }, | |
| { | |
| "epoch": 2.2048, | |
| "grad_norm": 0.5815421923387037, | |
| "learning_rate": 2.748845390029794e-05, | |
| "loss": 0.5245, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23173637688159943, | |
| "step": 690, | |
| "valid_targets_mean": 1531.8, | |
| "valid_targets_min": 341 | |
| }, | |
| { | |
| "epoch": 2.2208, | |
| "grad_norm": 0.6051089755827583, | |
| "learning_rate": 2.728109819820129e-05, | |
| "loss": 0.4767, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.31305450201034546, | |
| "step": 695, | |
| "valid_targets_mean": 1759.6, | |
| "valid_targets_min": 436 | |
| }, | |
| { | |
| "epoch": 2.2368, | |
| "grad_norm": 0.4983653007676144, | |
| "learning_rate": 2.7072836290323698e-05, | |
| "loss": 0.4928, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2711527347564697, | |
| "step": 700, | |
| "valid_targets_mean": 2523.6, | |
| "valid_targets_min": 363 | |
| }, | |
| { | |
| "epoch": 2.2528, | |
| "grad_norm": 0.680321321473548, | |
| "learning_rate": 2.6863694096950763e-05, | |
| "loss": 0.4998, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28383302688598633, | |
| "step": 705, | |
| "valid_targets_mean": 1973.8, | |
| "valid_targets_min": 326 | |
| }, | |
| { | |
| "epoch": 2.2688, | |
| "grad_norm": 0.5499307894582378, | |
| "learning_rate": 2.6653697647928485e-05, | |
| "loss": 0.5574, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.30596303939819336, | |
| "step": 710, | |
| "valid_targets_mean": 1937.1, | |
| "valid_targets_min": 442 | |
| }, | |
| { | |
| "epoch": 2.2848, | |
| "grad_norm": 0.6867251624958461, | |
| "learning_rate": 2.644287307942352e-05, | |
| "loss": 0.5369, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22647109627723694, | |
| "step": 715, | |
| "valid_targets_mean": 1304.1, | |
| "valid_targets_min": 344 | |
| }, | |
| { | |
| "epoch": 2.3008, | |
| "grad_norm": 0.6096841630437606, | |
| "learning_rate": 2.623124663067034e-05, | |
| "loss": 0.5451, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3565707206726074, | |
| "step": 720, | |
| "valid_targets_mean": 2535.9, | |
| "valid_targets_min": 358 | |
| }, | |
| { | |
| "epoch": 2.3168, | |
| "grad_norm": 0.6306294215439894, | |
| "learning_rate": 2.6018844640705448e-05, | |
| "loss": 0.5312, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23345035314559937, | |
| "step": 725, | |
| "valid_targets_mean": 1478.1, | |
| "valid_targets_min": 383 | |
| }, | |
| { | |
| "epoch": 2.3327999999999998, | |
| "grad_norm": 0.4888587401466767, | |
| "learning_rate": 2.580569354508925e-05, | |
| "loss": 0.5022, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26344504952430725, | |
| "step": 730, | |
| "valid_targets_mean": 2650.0, | |
| "valid_targets_min": 268 | |
| }, | |
| { | |
| "epoch": 2.3487999999999998, | |
| "grad_norm": 0.583979306967278, | |
| "learning_rate": 2.5591819872615856e-05, | |
| "loss": 0.4998, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29211124777793884, | |
| "step": 735, | |
| "valid_targets_mean": 2555.1, | |
| "valid_targets_min": 479 | |
| }, | |
| { | |
| "epoch": 2.3648, | |
| "grad_norm": 0.6127635931481561, | |
| "learning_rate": 2.5377250242011338e-05, | |
| "loss": 0.5147, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27184176445007324, | |
| "step": 740, | |
| "valid_targets_mean": 1855.9, | |
| "valid_targets_min": 433 | |
| }, | |
| { | |
| "epoch": 2.3808, | |
| "grad_norm": 0.5378873769551493, | |
| "learning_rate": 2.516201135862073e-05, | |
| "loss": 0.4974, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24594935774803162, | |
| "step": 745, | |
| "valid_targets_mean": 1570.9, | |
| "valid_targets_min": 468 | |
| }, | |
| { | |
| "epoch": 2.3968, | |
| "grad_norm": 0.6213390734526246, | |
| "learning_rate": 2.494613001108431e-05, | |
| "loss": 0.5638, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3126072287559509, | |
| "step": 750, | |
| "valid_targets_mean": 1707.1, | |
| "valid_targets_min": 356 | |
| }, | |
| { | |
| "epoch": 2.4128, | |
| "grad_norm": 0.5519219108629317, | |
| "learning_rate": 2.4729633068003466e-05, | |
| "loss": 0.5238, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2724781036376953, | |
| "step": 755, | |
| "valid_targets_mean": 2266.9, | |
| "valid_targets_min": 334 | |
| }, | |
| { | |
| "epoch": 2.4288, | |
| "grad_norm": 0.5289646828722117, | |
| "learning_rate": 2.4512547474596624e-05, | |
| "loss": 0.5293, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21402958035469055, | |
| "step": 760, | |
| "valid_targets_mean": 2041.2, | |
| "valid_targets_min": 348 | |
| }, | |
| { | |
| "epoch": 2.4448, | |
| "grad_norm": 0.4686786342950784, | |
| "learning_rate": 2.429490024934566e-05, | |
| "loss": 0.4998, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21400263905525208, | |
| "step": 765, | |
| "valid_targets_mean": 2511.7, | |
| "valid_targets_min": 304 | |
| }, | |
| { | |
| "epoch": 2.4608, | |
| "grad_norm": 0.6217378245973096, | |
| "learning_rate": 2.4076718480633178e-05, | |
| "loss": 0.5446, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22741106152534485, | |
| "step": 770, | |
| "valid_targets_mean": 1596.1, | |
| "valid_targets_min": 377 | |
| }, | |
| { | |
| "epoch": 2.4768, | |
| "grad_norm": 0.6020364504917762, | |
| "learning_rate": 2.3858029323371067e-05, | |
| "loss": 0.5358, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21303874254226685, | |
| "step": 775, | |
| "valid_targets_mean": 1515.4, | |
| "valid_targets_min": 325 | |
| }, | |
| { | |
| "epoch": 2.4928, | |
| "grad_norm": 0.5690585232304295, | |
| "learning_rate": 2.363885999562084e-05, | |
| "loss": 0.5321, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2814662456512451, | |
| "step": 780, | |
| "valid_targets_mean": 2192.6, | |
| "valid_targets_min": 356 | |
| }, | |
| { | |
| "epoch": 2.5088, | |
| "grad_norm": 0.6091924978472609, | |
| "learning_rate": 2.3419237775206026e-05, | |
| "loss": 0.526, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2218950241804123, | |
| "step": 785, | |
| "valid_targets_mean": 2015.3, | |
| "valid_targets_min": 541 | |
| }, | |
| { | |
| "epoch": 2.5248, | |
| "grad_norm": 0.46438171340993656, | |
| "learning_rate": 2.3199189996317205e-05, | |
| "loss": 0.4907, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2658768892288208, | |
| "step": 790, | |
| "valid_targets_mean": 3509.2, | |
| "valid_targets_min": 434 | |
| }, | |
| { | |
| "epoch": 2.5408, | |
| "grad_norm": 0.47727776630400814, | |
| "learning_rate": 2.297874404610998e-05, | |
| "loss": 0.534, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23701204359531403, | |
| "step": 795, | |
| "valid_targets_mean": 2583.4, | |
| "valid_targets_min": 363 | |
| }, | |
| { | |
| "epoch": 2.5568, | |
| "grad_norm": 0.6241644112826841, | |
| "learning_rate": 2.2757927361296376e-05, | |
| "loss": 0.5622, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27000898122787476, | |
| "step": 800, | |
| "valid_targets_mean": 1973.5, | |
| "valid_targets_min": 430 | |
| }, | |
| { | |
| "epoch": 2.5728, | |
| "grad_norm": 0.5539757445821889, | |
| "learning_rate": 2.2536767424730052e-05, | |
| "loss": 0.5281, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2509377598762512, | |
| "step": 805, | |
| "valid_targets_mean": 1509.6, | |
| "valid_targets_min": 325 | |
| }, | |
| { | |
| "epoch": 2.5888, | |
| "grad_norm": 0.6966844834857684, | |
| "learning_rate": 2.2315291761985803e-05, | |
| "loss": 0.482, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2346486747264862, | |
| "step": 810, | |
| "valid_targets_mean": 1629.5, | |
| "valid_targets_min": 324 | |
| }, | |
| { | |
| "epoch": 2.6048, | |
| "grad_norm": 0.5284888703121386, | |
| "learning_rate": 2.2093527937933716e-05, | |
| "loss": 0.5531, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26848292350769043, | |
| "step": 815, | |
| "valid_targets_mean": 2305.7, | |
| "valid_targets_min": 321 | |
| }, | |
| { | |
| "epoch": 2.6208, | |
| "grad_norm": 0.6240909086729051, | |
| "learning_rate": 2.1871503553308447e-05, | |
| "loss": 0.5123, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24034564197063446, | |
| "step": 820, | |
| "valid_targets_mean": 1178.2, | |
| "valid_targets_min": 482 | |
| }, | |
| { | |
| "epoch": 2.6368, | |
| "grad_norm": 0.5391319618986805, | |
| "learning_rate": 2.164924624127403e-05, | |
| "loss": 0.5416, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.16607390344142914, | |
| "step": 825, | |
| "valid_targets_mean": 1136.4, | |
| "valid_targets_min": 452 | |
| }, | |
| { | |
| "epoch": 2.6528, | |
| "grad_norm": 0.7153894537702798, | |
| "learning_rate": 2.1426783663984648e-05, | |
| "loss": 0.5389, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27678871154785156, | |
| "step": 830, | |
| "valid_targets_mean": 1122.9, | |
| "valid_targets_min": 302 | |
| }, | |
| { | |
| "epoch": 2.6688, | |
| "grad_norm": 0.5201269288320189, | |
| "learning_rate": 2.1204143509141818e-05, | |
| "loss": 0.5122, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.18790093064308167, | |
| "step": 835, | |
| "valid_targets_mean": 2153.4, | |
| "valid_targets_min": 343 | |
| }, | |
| { | |
| "epoch": 2.6848, | |
| "grad_norm": 0.5745300424883989, | |
| "learning_rate": 2.0981353486548363e-05, | |
| "loss": 0.509, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26986461877822876, | |
| "step": 840, | |
| "valid_targets_mean": 1936.9, | |
| "valid_targets_min": 392 | |
| }, | |
| { | |
| "epoch": 2.7008, | |
| "grad_norm": 0.5354374318106816, | |
| "learning_rate": 2.075844132465964e-05, | |
| "loss": 0.4815, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2566140294075012, | |
| "step": 845, | |
| "valid_targets_mean": 2095.2, | |
| "valid_targets_min": 339 | |
| }, | |
| { | |
| "epoch": 2.7168, | |
| "grad_norm": 0.6884842467105364, | |
| "learning_rate": 2.0535434767132495e-05, | |
| "loss": 0.5322, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29088377952575684, | |
| "step": 850, | |
| "valid_targets_mean": 1338.5, | |
| "valid_targets_min": 349 | |
| }, | |
| { | |
| "epoch": 2.7328, | |
| "grad_norm": 0.592811257887557, | |
| "learning_rate": 2.0312361569372215e-05, | |
| "loss": 0.5355, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.237553209066391, | |
| "step": 855, | |
| "valid_targets_mean": 1656.7, | |
| "valid_targets_min": 284 | |
| }, | |
| { | |
| "epoch": 2.7488, | |
| "grad_norm": 0.7009568075199551, | |
| "learning_rate": 2.0089249495078186e-05, | |
| "loss": 0.547, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.30477380752563477, | |
| "step": 860, | |
| "valid_targets_mean": 1595.6, | |
| "valid_targets_min": 378 | |
| }, | |
| { | |
| "epoch": 2.7648, | |
| "grad_norm": 0.612366254611472, | |
| "learning_rate": 1.9866126312788333e-05, | |
| "loss": 0.537, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27290135622024536, | |
| "step": 865, | |
| "valid_targets_mean": 2508.3, | |
| "valid_targets_min": 292 | |
| }, | |
| { | |
| "epoch": 2.7808, | |
| "grad_norm": 0.6287627006901175, | |
| "learning_rate": 1.964301979242308e-05, | |
| "loss": 0.5236, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2584809958934784, | |
| "step": 870, | |
| "valid_targets_mean": 1590.4, | |
| "valid_targets_min": 414 | |
| }, | |
| { | |
| "epoch": 2.7968, | |
| "grad_norm": 0.5035138331546867, | |
| "learning_rate": 1.9419957701829138e-05, | |
| "loss": 0.5137, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23823773860931396, | |
| "step": 875, | |
| "valid_targets_mean": 2576.1, | |
| "valid_targets_min": 389 | |
| }, | |
| { | |
| "epoch": 2.8128, | |
| "grad_norm": 0.5661823207071477, | |
| "learning_rate": 1.9196967803323464e-05, | |
| "loss": 0.513, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2821880877017975, | |
| "step": 880, | |
| "valid_targets_mean": 2015.2, | |
| "valid_targets_min": 344 | |
| }, | |
| { | |
| "epoch": 2.8288, | |
| "grad_norm": 0.6228231204791509, | |
| "learning_rate": 1.8974077850237983e-05, | |
| "loss": 0.5326, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2654019296169281, | |
| "step": 885, | |
| "valid_targets_mean": 1703.9, | |
| "valid_targets_min": 398 | |
| }, | |
| { | |
| "epoch": 2.8448, | |
| "grad_norm": 0.5654584646040889, | |
| "learning_rate": 1.875131558346542e-05, | |
| "loss": 0.5151, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22387553751468658, | |
| "step": 890, | |
| "valid_targets_mean": 1856.3, | |
| "valid_targets_min": 311 | |
| }, | |
| { | |
| "epoch": 2.8608000000000002, | |
| "grad_norm": 0.5773875427841509, | |
| "learning_rate": 1.8528708728006654e-05, | |
| "loss": 0.4997, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27452605962753296, | |
| "step": 895, | |
| "valid_targets_mean": 1736.9, | |
| "valid_targets_min": 405 | |
| }, | |
| { | |
| "epoch": 2.8768000000000002, | |
| "grad_norm": 0.5732438859191387, | |
| "learning_rate": 1.8306284989520055e-05, | |
| "loss": 0.5427, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2751728892326355, | |
| "step": 900, | |
| "valid_targets_mean": 1612.8, | |
| "valid_targets_min": 329 | |
| }, | |
| { | |
| "epoch": 2.8928000000000003, | |
| "grad_norm": 0.6385684581814615, | |
| "learning_rate": 1.8084072050873265e-05, | |
| "loss": 0.5241, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24755489826202393, | |
| "step": 905, | |
| "valid_targets_mean": 1545.8, | |
| "valid_targets_min": 324 | |
| }, | |
| { | |
| "epoch": 2.9088000000000003, | |
| "grad_norm": 0.46498889068445215, | |
| "learning_rate": 1.786209756869775e-05, | |
| "loss": 0.5044, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22594481706619263, | |
| "step": 910, | |
| "valid_targets_mean": 2193.2, | |
| "valid_targets_min": 345 | |
| }, | |
| { | |
| "epoch": 2.9248, | |
| "grad_norm": 0.5572362594825182, | |
| "learning_rate": 1.764038916994669e-05, | |
| "loss": 0.5095, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20807725191116333, | |
| "step": 915, | |
| "valid_targets_mean": 1596.6, | |
| "valid_targets_min": 359 | |
| }, | |
| { | |
| "epoch": 2.9408, | |
| "grad_norm": 0.6219014704924913, | |
| "learning_rate": 1.741897444845649e-05, | |
| "loss": 0.5417, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28473109006881714, | |
| "step": 920, | |
| "valid_targets_mean": 1392.3, | |
| "valid_targets_min": 373 | |
| }, | |
| { | |
| "epoch": 2.9568, | |
| "grad_norm": 0.6217088354230381, | |
| "learning_rate": 1.7197880961512498e-05, | |
| "loss": 0.4969, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28627312183380127, | |
| "step": 925, | |
| "valid_targets_mean": 1407.1, | |
| "valid_targets_min": 515 | |
| }, | |
| { | |
| "epoch": 2.9728, | |
| "grad_norm": 0.5477325406546804, | |
| "learning_rate": 1.6977136226419187e-05, | |
| "loss": 0.5493, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22506684064865112, | |
| "step": 930, | |
| "valid_targets_mean": 1630.3, | |
| "valid_targets_min": 570 | |
| }, | |
| { | |
| "epoch": 2.9888, | |
| "grad_norm": 0.5054897525636634, | |
| "learning_rate": 1.6756767717075354e-05, | |
| "loss": 0.4961, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.25903797149658203, | |
| "step": 935, | |
| "valid_targets_mean": 2358.1, | |
| "valid_targets_min": 573 | |
| }, | |
| { | |
| "epoch": 3.0032, | |
| "grad_norm": 0.6344780315884032, | |
| "learning_rate": 1.6536802860554723e-05, | |
| "loss": 0.4896, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.276022732257843, | |
| "step": 940, | |
| "valid_targets_mean": 1502.6, | |
| "valid_targets_min": 367 | |
| }, | |
| { | |
| "epoch": 3.0192, | |
| "grad_norm": 0.6013905175624438, | |
| "learning_rate": 1.631726903369238e-05, | |
| "loss": 0.5074, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28036779165267944, | |
| "step": 945, | |
| "valid_targets_mean": 2008.5, | |
| "valid_targets_min": 505 | |
| }, | |
| { | |
| "epoch": 3.0352, | |
| "grad_norm": 0.5967281691305278, | |
| "learning_rate": 1.609819355967744e-05, | |
| "loss": 0.5266, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29622554779052734, | |
| "step": 950, | |
| "valid_targets_mean": 2090.8, | |
| "valid_targets_min": 364 | |
| }, | |
| { | |
| "epoch": 3.0512, | |
| "grad_norm": 0.6446729739158132, | |
| "learning_rate": 1.587960370465239e-05, | |
| "loss": 0.4584, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2446470856666565, | |
| "step": 955, | |
| "valid_targets_mean": 1775.6, | |
| "valid_targets_min": 502 | |
| }, | |
| { | |
| "epoch": 3.0672, | |
| "grad_norm": 0.6869934081930906, | |
| "learning_rate": 1.5661526674319582e-05, | |
| "loss": 0.5678, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.35943692922592163, | |
| "step": 960, | |
| "valid_targets_mean": 1705.9, | |
| "valid_targets_min": 330 | |
| }, | |
| { | |
| "epoch": 3.0832, | |
| "grad_norm": 0.5227087553237807, | |
| "learning_rate": 1.544398961055516e-05, | |
| "loss": 0.4944, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2873580753803253, | |
| "step": 965, | |
| "valid_targets_mean": 2310.8, | |
| "valid_targets_min": 313 | |
| }, | |
| { | |
| "epoch": 3.0992, | |
| "grad_norm": 0.6166244456703587, | |
| "learning_rate": 1.5227019588031035e-05, | |
| "loss": 0.5084, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.30043256282806396, | |
| "step": 970, | |
| "valid_targets_mean": 1549.4, | |
| "valid_targets_min": 355 | |
| }, | |
| { | |
| "epoch": 3.1152, | |
| "grad_norm": 0.5129158712932121, | |
| "learning_rate": 1.501064361084511e-05, | |
| "loss": 0.5182, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.1984570473432541, | |
| "step": 975, | |
| "valid_targets_mean": 1646.1, | |
| "valid_targets_min": 310 | |
| }, | |
| { | |
| "epoch": 3.1312, | |
| "grad_norm": 0.6230108622245827, | |
| "learning_rate": 1.47948886091604e-05, | |
| "loss": 0.4964, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24358248710632324, | |
| "step": 980, | |
| "valid_targets_mean": 1548.6, | |
| "valid_targets_min": 268 | |
| }, | |
| { | |
| "epoch": 3.1471999999999998, | |
| "grad_norm": 0.4917071378261795, | |
| "learning_rate": 1.4579781435853289e-05, | |
| "loss": 0.4462, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20579692721366882, | |
| "step": 985, | |
| "valid_targets_mean": 2780.6, | |
| "valid_targets_min": 315 | |
| }, | |
| { | |
| "epoch": 3.1632, | |
| "grad_norm": 0.5588151311221814, | |
| "learning_rate": 1.4365348863171406e-05, | |
| "loss": 0.4826, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21542513370513916, | |
| "step": 990, | |
| "valid_targets_mean": 1787.9, | |
| "valid_targets_min": 304 | |
| }, | |
| { | |
| "epoch": 3.1792, | |
| "grad_norm": 0.5977497960476809, | |
| "learning_rate": 1.4151617579401551e-05, | |
| "loss": 0.4901, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20614565908908844, | |
| "step": 995, | |
| "valid_targets_mean": 1470.6, | |
| "valid_targets_min": 298 | |
| }, | |
| { | |
| "epoch": 3.1952, | |
| "grad_norm": 0.4729523303344217, | |
| "learning_rate": 1.3938614185548094e-05, | |
| "loss": 0.4824, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.1801915168762207, | |
| "step": 1000, | |
| "valid_targets_mean": 2032.3, | |
| "valid_targets_min": 374 | |
| }, | |
| { | |
| "epoch": 3.2112, | |
| "grad_norm": 0.5291243015487793, | |
| "learning_rate": 1.3726365192022173e-05, | |
| "loss": 0.5063, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.25157517194747925, | |
| "step": 1005, | |
| "valid_targets_mean": 2172.0, | |
| "valid_targets_min": 386 | |
| }, | |
| { | |
| "epoch": 3.2272, | |
| "grad_norm": 0.48448565727707893, | |
| "learning_rate": 1.3514897015342257e-05, | |
| "loss": 0.4954, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.15489669144153595, | |
| "step": 1010, | |
| "valid_targets_mean": 1431.9, | |
| "valid_targets_min": 321 | |
| }, | |
| { | |
| "epoch": 3.2432, | |
| "grad_norm": 0.5307496011176411, | |
| "learning_rate": 1.3304235974846295e-05, | |
| "loss": 0.5227, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2140897810459137, | |
| "step": 1015, | |
| "valid_targets_mean": 1613.6, | |
| "valid_targets_min": 346 | |
| }, | |
| { | |
| "epoch": 3.2592, | |
| "grad_norm": 0.7076672546581853, | |
| "learning_rate": 1.3094408289416052e-05, | |
| "loss": 0.493, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.273044615983963, | |
| "step": 1020, | |
| "valid_targets_mean": 1225.0, | |
| "valid_targets_min": 374 | |
| }, | |
| { | |
| "epoch": 3.2752, | |
| "grad_norm": 0.5403450998922833, | |
| "learning_rate": 1.2885440074213877e-05, | |
| "loss": 0.4882, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.18412335216999054, | |
| "step": 1025, | |
| "valid_targets_mean": 1801.4, | |
| "valid_targets_min": 362 | |
| }, | |
| { | |
| "epoch": 3.2912, | |
| "grad_norm": 0.5959361576157355, | |
| "learning_rate": 1.267735733743242e-05, | |
| "loss": 0.5061, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28913840651512146, | |
| "step": 1030, | |
| "valid_targets_mean": 2159.1, | |
| "valid_targets_min": 300 | |
| }, | |
| { | |
| "epoch": 3.3072, | |
| "grad_norm": 0.6581434687860452, | |
| "learning_rate": 1.2470185977057643e-05, | |
| "loss": 0.4915, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20571711659431458, | |
| "step": 1035, | |
| "valid_targets_mean": 1431.0, | |
| "valid_targets_min": 344 | |
| }, | |
| { | |
| "epoch": 3.3232, | |
| "grad_norm": 0.5506339257794051, | |
| "learning_rate": 1.2263951777645588e-05, | |
| "loss": 0.4834, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20041485130786896, | |
| "step": 1040, | |
| "valid_targets_mean": 1944.2, | |
| "valid_targets_min": 304 | |
| }, | |
| { | |
| "epoch": 3.3392, | |
| "grad_norm": 0.598799850401766, | |
| "learning_rate": 1.2058680407113176e-05, | |
| "loss": 0.5302, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20620858669281006, | |
| "step": 1045, | |
| "valid_targets_mean": 1729.9, | |
| "valid_targets_min": 309 | |
| }, | |
| { | |
| "epoch": 3.3552, | |
| "grad_norm": 0.6872060140109199, | |
| "learning_rate": 1.1854397413543626e-05, | |
| "loss": 0.4909, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.280687153339386, | |
| "step": 1050, | |
| "valid_targets_mean": 1968.1, | |
| "valid_targets_min": 394 | |
| }, | |
| { | |
| "epoch": 3.3712, | |
| "grad_norm": 0.634770029673403, | |
| "learning_rate": 1.1651128222006713e-05, | |
| "loss": 0.5139, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.19431248307228088, | |
| "step": 1055, | |
| "valid_targets_mean": 1178.1, | |
| "valid_targets_min": 520 | |
| }, | |
| { | |
| "epoch": 3.3872, | |
| "grad_norm": 0.8051188801330211, | |
| "learning_rate": 1.1448898131394364e-05, | |
| "loss": 0.5223, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.25682786107063293, | |
| "step": 1060, | |
| "valid_targets_mean": 1219.4, | |
| "valid_targets_min": 347 | |
| }, | |
| { | |
| "epoch": 3.4032, | |
| "grad_norm": 0.7144553987559213, | |
| "learning_rate": 1.124773231127196e-05, | |
| "loss": 0.5129, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2120952010154724, | |
| "step": 1065, | |
| "valid_targets_mean": 1352.9, | |
| "valid_targets_min": 353 | |
| }, | |
| { | |
| "epoch": 3.4192, | |
| "grad_norm": 0.6928068165345987, | |
| "learning_rate": 1.1047655798745752e-05, | |
| "loss": 0.5174, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29641154408454895, | |
| "step": 1070, | |
| "valid_targets_mean": 1713.2, | |
| "valid_targets_min": 546 | |
| }, | |
| { | |
| "epoch": 3.4352, | |
| "grad_norm": 0.6331378430169772, | |
| "learning_rate": 1.084869349534671e-05, | |
| "loss": 0.5272, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3342178463935852, | |
| "step": 1075, | |
| "valid_targets_mean": 2134.9, | |
| "valid_targets_min": 301 | |
| }, | |
| { | |
| "epoch": 3.4512, | |
| "grad_norm": 0.6752991422130726, | |
| "learning_rate": 1.0650870163931275e-05, | |
| "loss": 0.5236, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24553604423999786, | |
| "step": 1080, | |
| "valid_targets_mean": 1135.5, | |
| "valid_targets_min": 416 | |
| }, | |
| { | |
| "epoch": 3.4672, | |
| "grad_norm": 0.5708685810431685, | |
| "learning_rate": 1.0454210425599426e-05, | |
| "loss": 0.4998, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20251703262329102, | |
| "step": 1085, | |
| "valid_targets_mean": 1731.3, | |
| "valid_targets_min": 340 | |
| }, | |
| { | |
| "epoch": 3.4832, | |
| "grad_norm": 0.6939639040967115, | |
| "learning_rate": 1.0258738756630255e-05, | |
| "loss": 0.504, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3255230784416199, | |
| "step": 1090, | |
| "valid_targets_mean": 1520.6, | |
| "valid_targets_min": 560 | |
| }, | |
| { | |
| "epoch": 3.4992, | |
| "grad_norm": 0.5867331892169605, | |
| "learning_rate": 1.0064479485435737e-05, | |
| "loss": 0.4987, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24216380715370178, | |
| "step": 1095, | |
| "valid_targets_mean": 2015.6, | |
| "valid_targets_min": 589 | |
| }, | |
| { | |
| "epoch": 3.5152, | |
| "grad_norm": 0.5861202088836924, | |
| "learning_rate": 9.871456789532736e-06, | |
| "loss": 0.51, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28611472249031067, | |
| "step": 1100, | |
| "valid_targets_mean": 2129.8, | |
| "valid_targets_min": 385 | |
| }, | |
| { | |
| "epoch": 3.5312, | |
| "grad_norm": 0.4891255447320874, | |
| "learning_rate": 9.679694692533909e-06, | |
| "loss": 0.4866, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21725909411907196, | |
| "step": 1105, | |
| "valid_targets_mean": 2537.8, | |
| "valid_targets_min": 390 | |
| }, | |
| { | |
| "epoch": 3.5472, | |
| "grad_norm": 0.6180630982797738, | |
| "learning_rate": 9.489217061157744e-06, | |
| "loss": 0.488, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.17382796108722687, | |
| "step": 1110, | |
| "valid_targets_mean": 1297.0, | |
| "valid_targets_min": 400 | |
| }, | |
| { | |
| "epoch": 3.5632, | |
| "grad_norm": 0.6651791274620857, | |
| "learning_rate": 9.30004760225806e-06, | |
| "loss": 0.5194, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3019868731498718, | |
| "step": 1115, | |
| "valid_targets_mean": 1609.6, | |
| "valid_targets_min": 417 | |
| }, | |
| { | |
| "epoch": 3.5792, | |
| "grad_norm": 0.6821483106290898, | |
| "learning_rate": 9.112209859873479e-06, | |
| "loss": 0.4928, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.19057822227478027, | |
| "step": 1120, | |
| "valid_targets_mean": 1106.1, | |
| "valid_targets_min": 308 | |
| }, | |
| { | |
| "epoch": 3.5952, | |
| "grad_norm": 0.5443897677311413, | |
| "learning_rate": 8.925727212297154e-06, | |
| "loss": 0.4936, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2540109157562256, | |
| "step": 1125, | |
| "valid_targets_mean": 2322.9, | |
| "valid_targets_min": 380 | |
| }, | |
| { | |
| "epoch": 3.6112, | |
| "grad_norm": 0.6850244082296009, | |
| "learning_rate": 8.74062286916705e-06, | |
| "loss": 0.5065, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22895655035972595, | |
| "step": 1130, | |
| "valid_targets_mean": 1482.4, | |
| "valid_targets_min": 358 | |
| }, | |
| { | |
| "epoch": 3.6272, | |
| "grad_norm": 0.7217480949579403, | |
| "learning_rate": 8.55691986857733e-06, | |
| "loss": 0.4939, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27817365527153015, | |
| "step": 1135, | |
| "valid_targets_mean": 1354.3, | |
| "valid_targets_min": 280 | |
| }, | |
| { | |
| "epoch": 3.6432, | |
| "grad_norm": 0.674882379275675, | |
| "learning_rate": 8.374641074210979e-06, | |
| "loss": 0.5335, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24654847383499146, | |
| "step": 1140, | |
| "valid_targets_mean": 1633.1, | |
| "valid_targets_min": 419 | |
| }, | |
| { | |
| "epoch": 3.6592000000000002, | |
| "grad_norm": 0.5786511976247036, | |
| "learning_rate": 8.193809172494249e-06, | |
| "loss": 0.4861, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26465269923210144, | |
| "step": 1145, | |
| "valid_targets_mean": 2178.4, | |
| "valid_targets_min": 326 | |
| }, | |
| { | |
| "epoch": 3.6752000000000002, | |
| "grad_norm": 1.2780808410764803, | |
| "learning_rate": 8.014446669773061e-06, | |
| "loss": 0.4956, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.32294759154319763, | |
| "step": 1150, | |
| "valid_targets_mean": 1886.1, | |
| "valid_targets_min": 451 | |
| }, | |
| { | |
| "epoch": 3.6912000000000003, | |
| "grad_norm": 0.578774514425232, | |
| "learning_rate": 7.83657588951187e-06, | |
| "loss": 0.493, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.17994293570518494, | |
| "step": 1155, | |
| "valid_targets_mean": 1390.4, | |
| "valid_targets_min": 325 | |
| }, | |
| { | |
| "epoch": 3.7072000000000003, | |
| "grad_norm": 0.5581685197886337, | |
| "learning_rate": 7.66021896951529e-06, | |
| "loss": 0.4667, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23018191754817963, | |
| "step": 1160, | |
| "valid_targets_mean": 1914.8, | |
| "valid_targets_min": 345 | |
| }, | |
| { | |
| "epoch": 3.7232, | |
| "grad_norm": 0.5798400529032423, | |
| "learning_rate": 7.485397859172841e-06, | |
| "loss": 0.4956, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24066895246505737, | |
| "step": 1165, | |
| "valid_targets_mean": 2146.2, | |
| "valid_targets_min": 357 | |
| }, | |
| { | |
| "epoch": 3.7392, | |
| "grad_norm": 0.6978814122379023, | |
| "learning_rate": 7.312134316727093e-06, | |
| "loss": 0.4686, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3223838806152344, | |
| "step": 1170, | |
| "valid_targets_mean": 2134.8, | |
| "valid_targets_min": 335 | |
| }, | |
| { | |
| "epoch": 3.7552, | |
| "grad_norm": 0.6891476624508007, | |
| "learning_rate": 7.140449906565656e-06, | |
| "loss": 0.4748, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21297484636306763, | |
| "step": 1175, | |
| "valid_targets_mean": 1297.0, | |
| "valid_targets_min": 297 | |
| }, | |
| { | |
| "epoch": 3.7712, | |
| "grad_norm": 0.5766188293973691, | |
| "learning_rate": 6.970365996537285e-06, | |
| "loss": 0.5051, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.281222939491272, | |
| "step": 1180, | |
| "valid_targets_mean": 2315.5, | |
| "valid_targets_min": 339 | |
| }, | |
| { | |
| "epoch": 3.7872, | |
| "grad_norm": 0.7046210585998732, | |
| "learning_rate": 6.801903755292403e-06, | |
| "loss": 0.5273, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3093089163303375, | |
| "step": 1185, | |
| "valid_targets_mean": 1396.1, | |
| "valid_targets_min": 342 | |
| }, | |
| { | |
| "epoch": 3.8032, | |
| "grad_norm": 0.4376995996724134, | |
| "learning_rate": 6.635084149648481e-06, | |
| "loss": 0.4823, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22248825430870056, | |
| "step": 1190, | |
| "valid_targets_mean": 3229.7, | |
| "valid_targets_min": 391 | |
| }, | |
| { | |
| "epoch": 3.8192, | |
| "grad_norm": 0.5131056511031186, | |
| "learning_rate": 6.469927941980483e-06, | |
| "loss": 0.4805, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.18466554582118988, | |
| "step": 1195, | |
| "valid_targets_mean": 1582.9, | |
| "valid_targets_min": 518 | |
| }, | |
| { | |
| "epoch": 3.8352, | |
| "grad_norm": 0.6452312408364583, | |
| "learning_rate": 6.30645568763681e-06, | |
| "loss": 0.496, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21880459785461426, | |
| "step": 1200, | |
| "valid_targets_mean": 1228.4, | |
| "valid_targets_min": 432 | |
| }, | |
| { | |
| "epoch": 3.8512, | |
| "grad_norm": 0.5380601141479627, | |
| "learning_rate": 6.144687732380963e-06, | |
| "loss": 0.5304, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2577124834060669, | |
| "step": 1205, | |
| "valid_targets_mean": 2379.2, | |
| "valid_targets_min": 388 | |
| }, | |
| { | |
| "epoch": 3.8672, | |
| "grad_norm": 0.5748210234616867, | |
| "learning_rate": 5.9846442098592895e-06, | |
| "loss": 0.4651, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2521425485610962, | |
| "step": 1210, | |
| "valid_targets_mean": 2123.4, | |
| "valid_targets_min": 322 | |
| }, | |
| { | |
| "epoch": 3.8832, | |
| "grad_norm": 0.48438796850734583, | |
| "learning_rate": 5.826345039095178e-06, | |
| "loss": 0.4601, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.1812637746334076, | |
| "step": 1215, | |
| "valid_targets_mean": 2076.3, | |
| "valid_targets_min": 300 | |
| }, | |
| { | |
| "epoch": 3.8992, | |
| "grad_norm": 0.621110295182322, | |
| "learning_rate": 5.669809922009937e-06, | |
| "loss": 0.4993, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2818862795829773, | |
| "step": 1220, | |
| "valid_targets_mean": 1673.2, | |
| "valid_targets_min": 279 | |
| }, | |
| { | |
| "epoch": 3.9152, | |
| "grad_norm": 0.5555884163185787, | |
| "learning_rate": 5.515058340970665e-06, | |
| "loss": 0.4303, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2184838205575943, | |
| "step": 1225, | |
| "valid_targets_mean": 1824.2, | |
| "valid_targets_min": 427 | |
| }, | |
| { | |
| "epoch": 3.9312, | |
| "grad_norm": 0.6647038173757377, | |
| "learning_rate": 5.362109556365496e-06, | |
| "loss": 0.4948, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.27975088357925415, | |
| "step": 1230, | |
| "valid_targets_mean": 1639.4, | |
| "valid_targets_min": 317 | |
| }, | |
| { | |
| "epoch": 3.9472, | |
| "grad_norm": 0.5328762513947399, | |
| "learning_rate": 5.2109826042064445e-06, | |
| "loss": 0.501, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.1903163492679596, | |
| "step": 1235, | |
| "valid_targets_mean": 1561.6, | |
| "valid_targets_min": 333 | |
| }, | |
| { | |
| "epoch": 3.9632, | |
| "grad_norm": 0.5637610399979923, | |
| "learning_rate": 5.0616962937601945e-06, | |
| "loss": 0.4565, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2709476351737976, | |
| "step": 1240, | |
| "valid_targets_mean": 2084.4, | |
| "valid_targets_min": 356 | |
| }, | |
| { | |
| "epoch": 3.9792, | |
| "grad_norm": 0.6594116088116377, | |
| "learning_rate": 4.914269205207076e-06, | |
| "loss": 0.5223, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28497931361198425, | |
| "step": 1245, | |
| "valid_targets_mean": 1907.8, | |
| "valid_targets_min": 430 | |
| }, | |
| { | |
| "epoch": 3.9952, | |
| "grad_norm": 0.5117690023196643, | |
| "learning_rate": 4.76871968732858e-06, | |
| "loss": 0.4969, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.1631999909877777, | |
| "step": 1250, | |
| "valid_targets_mean": 1486.6, | |
| "valid_targets_min": 341 | |
| }, | |
| { | |
| "epoch": 4.0096, | |
| "grad_norm": 0.588478619060525, | |
| "learning_rate": 4.625065855223689e-06, | |
| "loss": 0.4874, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20968790352344513, | |
| "step": 1255, | |
| "valid_targets_mean": 1597.4, | |
| "valid_targets_min": 281 | |
| }, | |
| { | |
| "epoch": 4.0256, | |
| "grad_norm": 0.6688803497932884, | |
| "learning_rate": 4.483325588054259e-06, | |
| "loss": 0.5404, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3303160071372986, | |
| "step": 1260, | |
| "valid_targets_mean": 1702.8, | |
| "valid_targets_min": 335 | |
| }, | |
| { | |
| "epoch": 4.0416, | |
| "grad_norm": 0.5399541583183427, | |
| "learning_rate": 4.343516526819755e-06, | |
| "loss": 0.4956, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2031438946723938, | |
| "step": 1265, | |
| "valid_targets_mean": 1462.2, | |
| "valid_targets_min": 304 | |
| }, | |
| { | |
| "epoch": 4.0576, | |
| "grad_norm": 0.6468614707926869, | |
| "learning_rate": 4.205656072161681e-06, | |
| "loss": 0.495, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26288682222366333, | |
| "step": 1270, | |
| "valid_targets_mean": 1628.1, | |
| "valid_targets_min": 380 | |
| }, | |
| { | |
| "epoch": 4.0736, | |
| "grad_norm": 0.5256140068386081, | |
| "learning_rate": 4.069761382197901e-06, | |
| "loss": 0.475, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.25582724809646606, | |
| "step": 1275, | |
| "valid_targets_mean": 2535.1, | |
| "valid_targets_min": 349 | |
| }, | |
| { | |
| "epoch": 4.0896, | |
| "grad_norm": 0.6098246142019474, | |
| "learning_rate": 3.935849370387104e-06, | |
| "loss": 0.4816, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.1949392855167389, | |
| "step": 1280, | |
| "valid_targets_mean": 1631.6, | |
| "valid_targets_min": 387 | |
| }, | |
| { | |
| "epoch": 4.1056, | |
| "grad_norm": 0.5308571227734422, | |
| "learning_rate": 3.803936703423783e-06, | |
| "loss": 0.4457, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.18933570384979248, | |
| "step": 1285, | |
| "valid_targets_mean": 1489.1, | |
| "valid_targets_min": 325 | |
| }, | |
| { | |
| "epoch": 4.1216, | |
| "grad_norm": 0.5524734164128581, | |
| "learning_rate": 3.6740397991638864e-06, | |
| "loss": 0.5034, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2136593461036682, | |
| "step": 1290, | |
| "valid_targets_mean": 1708.4, | |
| "valid_targets_min": 518 | |
| }, | |
| { | |
| "epoch": 4.1376, | |
| "grad_norm": 0.61360857545027, | |
| "learning_rate": 3.5461748245814633e-06, | |
| "loss": 0.4722, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20733609795570374, | |
| "step": 1295, | |
| "valid_targets_mean": 1533.6, | |
| "valid_targets_min": 397 | |
| }, | |
| { | |
| "epoch": 4.1536, | |
| "grad_norm": 0.5366139131953872, | |
| "learning_rate": 3.420357693756502e-06, | |
| "loss": 0.478, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21171560883522034, | |
| "step": 1300, | |
| "valid_targets_mean": 1925.5, | |
| "valid_targets_min": 377 | |
| }, | |
| { | |
| "epoch": 4.1696, | |
| "grad_norm": 0.5289765618034591, | |
| "learning_rate": 3.2966040658942666e-06, | |
| "loss": 0.4429, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21857257187366486, | |
| "step": 1305, | |
| "valid_targets_mean": 1886.8, | |
| "valid_targets_min": 534 | |
| }, | |
| { | |
| "epoch": 4.1856, | |
| "grad_norm": 0.6393062442875381, | |
| "learning_rate": 3.174929343376374e-06, | |
| "loss": 0.4821, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3062552809715271, | |
| "step": 1310, | |
| "valid_targets_mean": 1378.6, | |
| "valid_targets_min": 435 | |
| }, | |
| { | |
| "epoch": 4.2016, | |
| "grad_norm": 0.5996859525434625, | |
| "learning_rate": 3.055348669843794e-06, | |
| "loss": 0.5102, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26755791902542114, | |
| "step": 1315, | |
| "valid_targets_mean": 2039.4, | |
| "valid_targets_min": 345 | |
| }, | |
| { | |
| "epoch": 4.2176, | |
| "grad_norm": 0.6909585409804595, | |
| "learning_rate": 2.937876928312062e-06, | |
| "loss": 0.4501, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22569231688976288, | |
| "step": 1320, | |
| "valid_targets_mean": 1276.2, | |
| "valid_targets_min": 391 | |
| }, | |
| { | |
| "epoch": 4.2336, | |
| "grad_norm": 0.5749450658990035, | |
| "learning_rate": 2.8225287393189547e-06, | |
| "loss": 0.4777, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21987636387348175, | |
| "step": 1325, | |
| "valid_targets_mean": 1859.2, | |
| "valid_targets_min": 319 | |
| }, | |
| { | |
| "epoch": 4.2496, | |
| "grad_norm": 0.5564974655508327, | |
| "learning_rate": 2.709318459104815e-06, | |
| "loss": 0.4748, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2506256103515625, | |
| "step": 1330, | |
| "valid_targets_mean": 1760.2, | |
| "valid_targets_min": 348 | |
| }, | |
| { | |
| "epoch": 4.2656, | |
| "grad_norm": 0.5582896856771299, | |
| "learning_rate": 2.5982601778257733e-06, | |
| "loss": 0.4758, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23432789742946625, | |
| "step": 1335, | |
| "valid_targets_mean": 2037.2, | |
| "valid_targets_min": 385 | |
| }, | |
| { | |
| "epoch": 4.2816, | |
| "grad_norm": 0.7875568747023372, | |
| "learning_rate": 2.4893677178000797e-06, | |
| "loss": 0.4939, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.18148024380207062, | |
| "step": 1340, | |
| "valid_targets_mean": 1209.4, | |
| "valid_targets_min": 318 | |
| }, | |
| { | |
| "epoch": 4.2976, | |
| "grad_norm": 0.6498155393636108, | |
| "learning_rate": 2.3826546317877795e-06, | |
| "loss": 0.4767, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2270783632993698, | |
| "step": 1345, | |
| "valid_targets_mean": 1611.4, | |
| "valid_targets_min": 298 | |
| }, | |
| { | |
| "epoch": 4.3136, | |
| "grad_norm": 0.6031775785651343, | |
| "learning_rate": 2.278134201303952e-06, | |
| "loss": 0.4819, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26071587204933167, | |
| "step": 1350, | |
| "valid_targets_mean": 1981.7, | |
| "valid_targets_min": 392 | |
| }, | |
| { | |
| "epoch": 4.3296, | |
| "grad_norm": 0.5219971003543084, | |
| "learning_rate": 2.1758194349656624e-06, | |
| "loss": 0.4695, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.1580902636051178, | |
| "step": 1355, | |
| "valid_targets_mean": 1376.2, | |
| "valid_targets_min": 556 | |
| }, | |
| { | |
| "epoch": 4.3456, | |
| "grad_norm": 0.5607581237014677, | |
| "learning_rate": 2.075723066872939e-06, | |
| "loss": 0.4837, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22719267010688782, | |
| "step": 1360, | |
| "valid_targets_mean": 2015.3, | |
| "valid_targets_min": 448 | |
| }, | |
| { | |
| "epoch": 4.3616, | |
| "grad_norm": 0.6577489781624221, | |
| "learning_rate": 1.977857555023854e-06, | |
| "loss": 0.47, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22835250198841095, | |
| "step": 1365, | |
| "valid_targets_mean": 1255.2, | |
| "valid_targets_min": 356 | |
| }, | |
| { | |
| "epoch": 4.3776, | |
| "grad_norm": 0.5483604102472543, | |
| "learning_rate": 1.8822350797640543e-06, | |
| "loss": 0.5029, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21087878942489624, | |
| "step": 1370, | |
| "valid_targets_mean": 2087.0, | |
| "valid_targets_min": 333 | |
| }, | |
| { | |
| "epoch": 4.3936, | |
| "grad_norm": 0.5584004157015625, | |
| "learning_rate": 1.788867542270729e-06, | |
| "loss": 0.4959, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.18049772083759308, | |
| "step": 1375, | |
| "valid_targets_mean": 1521.5, | |
| "valid_targets_min": 281 | |
| }, | |
| { | |
| "epoch": 4.4096, | |
| "grad_norm": 0.5965786396760591, | |
| "learning_rate": 1.6977665630714345e-06, | |
| "loss": 0.5086, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.31033411622047424, | |
| "step": 1380, | |
| "valid_targets_mean": 2062.2, | |
| "valid_targets_min": 355 | |
| }, | |
| { | |
| "epoch": 4.4256, | |
| "grad_norm": 0.6673612634535888, | |
| "learning_rate": 1.6089434805977799e-06, | |
| "loss": 0.5048, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24712365865707397, | |
| "step": 1385, | |
| "valid_targets_mean": 1506.6, | |
| "valid_targets_min": 290 | |
| }, | |
| { | |
| "epoch": 4.4416, | |
| "grad_norm": 0.5945936927139228, | |
| "learning_rate": 1.5224093497742654e-06, | |
| "loss": 0.4606, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2055821716785431, | |
| "step": 1390, | |
| "valid_targets_mean": 1827.1, | |
| "valid_targets_min": 314 | |
| }, | |
| { | |
| "epoch": 4.4576, | |
| "grad_norm": 0.5931889080079704, | |
| "learning_rate": 1.4381749406423695e-06, | |
| "loss": 0.4659, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20388510823249817, | |
| "step": 1395, | |
| "valid_targets_mean": 1576.4, | |
| "valid_targets_min": 314 | |
| }, | |
| { | |
| "epoch": 4.4736, | |
| "grad_norm": 0.5702080320890669, | |
| "learning_rate": 1.3562507370201062e-06, | |
| "loss": 0.4862, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20522195100784302, | |
| "step": 1400, | |
| "valid_targets_mean": 1762.7, | |
| "valid_targets_min": 440 | |
| }, | |
| { | |
| "epoch": 4.4896, | |
| "grad_norm": 0.51709895249062, | |
| "learning_rate": 1.2766469351972345e-06, | |
| "loss": 0.4671, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21051155030727386, | |
| "step": 1405, | |
| "valid_targets_mean": 2941.2, | |
| "valid_targets_min": 392 | |
| }, | |
| { | |
| "epoch": 4.5056, | |
| "grad_norm": 0.5768659669817562, | |
| "learning_rate": 1.1993734426661985e-06, | |
| "loss": 0.4873, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.296686053276062, | |
| "step": 1410, | |
| "valid_targets_mean": 2315.2, | |
| "valid_targets_min": 343 | |
| }, | |
| { | |
| "epoch": 4.5216, | |
| "grad_norm": 0.6485394364674811, | |
| "learning_rate": 1.1244398768890496e-06, | |
| "loss": 0.4717, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.20520901679992676, | |
| "step": 1415, | |
| "valid_targets_mean": 1496.0, | |
| "valid_targets_min": 506 | |
| }, | |
| { | |
| "epoch": 4.5376, | |
| "grad_norm": 0.5979604272412385, | |
| "learning_rate": 1.0518555641004613e-06, | |
| "loss": 0.4931, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.22371701896190643, | |
| "step": 1420, | |
| "valid_targets_mean": 1729.9, | |
| "valid_targets_min": 331 | |
| }, | |
| { | |
| "epoch": 4.5536, | |
| "grad_norm": 0.5263779500785178, | |
| "learning_rate": 9.816295381469954e-07, | |
| "loss": 0.4825, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21612754464149475, | |
| "step": 1425, | |
| "valid_targets_mean": 2349.2, | |
| "valid_targets_min": 357 | |
| }, | |
| { | |
| "epoch": 4.5696, | |
| "grad_norm": 0.7000615524944602, | |
| "learning_rate": 9.137705393627239e-07, | |
| "loss": 0.5133, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3547956347465515, | |
| "step": 1430, | |
| "valid_targets_mean": 1804.9, | |
| "valid_targets_min": 357 | |
| }, | |
| { | |
| "epoch": 4.5856, | |
| "grad_norm": 0.6210141087655107, | |
| "learning_rate": 8.482870134814214e-07, | |
| "loss": 0.4863, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.17804865539073944, | |
| "step": 1435, | |
| "valid_targets_mean": 1320.4, | |
| "valid_targets_min": 313 | |
| }, | |
| { | |
| "epoch": 4.6016, | |
| "grad_norm": 0.6807187225558066, | |
| "learning_rate": 7.851871105854125e-07, | |
| "loss": 0.4884, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2696976363658905, | |
| "step": 1440, | |
| "valid_targets_mean": 1188.7, | |
| "valid_targets_min": 412 | |
| }, | |
| { | |
| "epoch": 4.6176, | |
| "grad_norm": 0.53552893705078, | |
| "learning_rate": 7.244786840912033e-07, | |
| "loss": 0.4836, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24118053913116455, | |
| "step": 1445, | |
| "valid_targets_mean": 2352.5, | |
| "valid_targets_min": 405 | |
| }, | |
| { | |
| "epoch": 4.6336, | |
| "grad_norm": 0.6337208850641045, | |
| "learning_rate": 6.661692897720517e-07, | |
| "loss": 0.4914, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.17234866321086884, | |
| "step": 1450, | |
| "valid_targets_mean": 1406.1, | |
| "valid_targets_min": 357 | |
| }, | |
| { | |
| "epoch": 4.6495999999999995, | |
| "grad_norm": 0.6101255059824036, | |
| "learning_rate": 6.10266184817565e-07, | |
| "loss": 0.4927, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.19647923111915588, | |
| "step": 1455, | |
| "valid_targets_mean": 1735.9, | |
| "valid_targets_min": 355 | |
| }, | |
| { | |
| "epoch": 4.6655999999999995, | |
| "grad_norm": 0.4819947661898462, | |
| "learning_rate": 5.567763269304927e-07, | |
| "loss": 0.4638, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21751366555690765, | |
| "step": 1460, | |
| "valid_targets_mean": 2789.0, | |
| "valid_targets_min": 291 | |
| }, | |
| { | |
| "epoch": 4.6815999999999995, | |
| "grad_norm": 0.5156663225811347, | |
| "learning_rate": 5.057063734607392e-07, | |
| "loss": 0.4739, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21621708571910858, | |
| "step": 1465, | |
| "valid_targets_mean": 2056.8, | |
| "valid_targets_min": 324 | |
| }, | |
| { | |
| "epoch": 4.6975999999999996, | |
| "grad_norm": 0.6229413977979558, | |
| "learning_rate": 4.570626805768119e-07, | |
| "loss": 0.5046, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.1690235286951065, | |
| "step": 1470, | |
| "valid_targets_mean": 1181.2, | |
| "valid_targets_min": 367 | |
| }, | |
| { | |
| "epoch": 4.7136, | |
| "grad_norm": 0.6074858890034005, | |
| "learning_rate": 4.1085130247472625e-07, | |
| "loss": 0.5082, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3121945261955261, | |
| "step": 1475, | |
| "valid_targets_mean": 2018.7, | |
| "valid_targets_min": 419 | |
| }, | |
| { | |
| "epoch": 4.7296, | |
| "grad_norm": 0.6662140399608273, | |
| "learning_rate": 3.670779906244981e-07, | |
| "loss": 0.5173, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.29140985012054443, | |
| "step": 1480, | |
| "valid_targets_mean": 1729.5, | |
| "valid_targets_min": 325 | |
| }, | |
| { | |
| "epoch": 4.7456, | |
| "grad_norm": 0.5457221604306459, | |
| "learning_rate": 3.2574819305432713e-07, | |
| "loss": 0.4817, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2259424924850464, | |
| "step": 1485, | |
| "valid_targets_mean": 1967.8, | |
| "valid_targets_min": 326 | |
| }, | |
| { | |
| "epoch": 4.7616, | |
| "grad_norm": 0.6106976497041675, | |
| "learning_rate": 2.8686705367250824e-07, | |
| "loss": 0.4943, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24830885231494904, | |
| "step": 1490, | |
| "valid_targets_mean": 1953.1, | |
| "valid_targets_min": 312 | |
| }, | |
| { | |
| "epoch": 4.7776, | |
| "grad_norm": 0.6255356739675524, | |
| "learning_rate": 2.504394116272502e-07, | |
| "loss": 0.4806, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2968481481075287, | |
| "step": 1495, | |
| "valid_targets_mean": 1871.2, | |
| "valid_targets_min": 523 | |
| }, | |
| { | |
| "epoch": 4.7936, | |
| "grad_norm": 0.6589254098737786, | |
| "learning_rate": 2.1646980070437973e-07, | |
| "loss": 0.5014, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.3000338077545166, | |
| "step": 1500, | |
| "valid_targets_mean": 1585.0, | |
| "valid_targets_min": 354 | |
| }, | |
| { | |
| "epoch": 4.8096, | |
| "grad_norm": 0.518598123392684, | |
| "learning_rate": 1.8496244876306858e-07, | |
| "loss": 0.5038, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.221224844455719, | |
| "step": 1505, | |
| "valid_targets_mean": 2037.6, | |
| "valid_targets_min": 374 | |
| }, | |
| { | |
| "epoch": 4.8256, | |
| "grad_norm": 0.6138090448432473, | |
| "learning_rate": 1.559212772096319e-07, | |
| "loss": 0.4764, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.28576716780662537, | |
| "step": 1510, | |
| "valid_targets_mean": 1825.8, | |
| "valid_targets_min": 361 | |
| }, | |
| { | |
| "epoch": 4.8416, | |
| "grad_norm": 0.652966725148509, | |
| "learning_rate": 1.2934990050947228e-07, | |
| "loss": 0.4886, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26586198806762695, | |
| "step": 1515, | |
| "valid_targets_mean": 1465.1, | |
| "valid_targets_min": 375 | |
| }, | |
| { | |
| "epoch": 4.8576, | |
| "grad_norm": 0.6400822819669492, | |
| "learning_rate": 1.0525162573723269e-07, | |
| "loss": 0.503, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.24716216325759888, | |
| "step": 1520, | |
| "valid_targets_mean": 1512.1, | |
| "valid_targets_min": 340 | |
| }, | |
| { | |
| "epoch": 4.8736, | |
| "grad_norm": 0.6113920421093133, | |
| "learning_rate": 8.362945216517704e-08, | |
| "loss": 0.4663, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2500142455101013, | |
| "step": 1525, | |
| "valid_targets_mean": 2119.6, | |
| "valid_targets_min": 372 | |
| }, | |
| { | |
| "epoch": 4.8896, | |
| "grad_norm": 0.6483707678802277, | |
| "learning_rate": 6.448607088991532e-08, | |
| "loss": 0.4717, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23023313283920288, | |
| "step": 1530, | |
| "valid_targets_mean": 1914.2, | |
| "valid_targets_min": 437 | |
| }, | |
| { | |
| "epoch": 4.9056, | |
| "grad_norm": 1.987139130172234, | |
| "learning_rate": 4.782386449746934e-08, | |
| "loss": 0.4741, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.21392463147640228, | |
| "step": 1535, | |
| "valid_targets_mean": 1327.4, | |
| "valid_targets_min": 355 | |
| }, | |
| { | |
| "epoch": 4.9216, | |
| "grad_norm": 0.44902958316814084, | |
| "learning_rate": 3.3644906766734374e-08, | |
| "loss": 0.4651, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23930823802947998, | |
| "step": 1540, | |
| "valid_targets_mean": 2811.7, | |
| "valid_targets_min": 348 | |
| }, | |
| { | |
| "epoch": 4.9376, | |
| "grad_norm": 0.6906216602944996, | |
| "learning_rate": 2.1950962411367848e-08, | |
| "loss": 0.4758, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.26459425687789917, | |
| "step": 1545, | |
| "valid_targets_mean": 1283.7, | |
| "valid_targets_min": 328 | |
| }, | |
| { | |
| "epoch": 4.9536, | |
| "grad_norm": 0.5262684318208516, | |
| "learning_rate": 1.2743486860165022e-08, | |
| "loss": 0.4813, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.222878098487854, | |
| "step": 1550, | |
| "valid_targets_mean": 2221.9, | |
| "valid_targets_min": 327 | |
| }, | |
| { | |
| "epoch": 4.9696, | |
| "grad_norm": 0.7224147286652457, | |
| "learning_rate": 6.023626075915001e-09, | |
| "loss": 0.4711, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.2481645792722702, | |
| "step": 1555, | |
| "valid_targets_mean": 1469.6, | |
| "valid_targets_min": 332 | |
| }, | |
| { | |
| "epoch": 4.9856, | |
| "grad_norm": 0.5387438057978385, | |
| "learning_rate": 1.7922164127659457e-09, | |
| "loss": 0.48, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.23242776095867157, | |
| "step": 1560, | |
| "valid_targets_mean": 2196.2, | |
| "valid_targets_min": 419 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "grad_norm": 0.9868988589870178, | |
| "learning_rate": 4.978451213499824e-11, | |
| "loss": 0.5021, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.4729985296726227, | |
| "step": 1565, | |
| "valid_targets_mean": 1730.2, | |
| "valid_targets_min": 414 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "loss_nan_ranks": 0, | |
| "loss_rank_avg": 0.4729985296726227, | |
| "step": 1565, | |
| "total_flos": 3.9063951723462656e+17, | |
| "train_loss": 0.5453641192219889, | |
| "train_runtime": 10987.534, | |
| "train_samples_per_second": 4.551, | |
| "train_steps_per_second": 0.142, | |
| "valid_targets_mean": 1730.2, | |
| "valid_targets_min": 414 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1565, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": false, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.9063951723462656e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
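
The state above follows the standard Hugging Face Trainer state layout (`log_history` plus run-level fields), so it can be post-processed directly. A minimal sketch, assuming the JSON is saved locally as `trainer_state.json` (the filename and the summary printed below are illustrative assumptions, not part of the original log):

```python
# Minimal sketch: pull the per-step loss curve out of a Hugging Face
# Trainer state file. The filename "trainer_state.json" is an assumption.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step log entries carry a "loss" key; the final entry instead carries
# run-level summaries such as "train_loss" and "train_runtime".
curve = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]

print(f"logged points: {len(curve)}")
print(f"first: step {curve[0][0]:>4}, loss {curve[0][1]:.4f}")
print(f"last:  step {curve[-1][0]:>4}, loss {curve[-1][1]:.4f}")

summary = state["log_history"][-1]  # run-level summary appended at the end
print(f"train_loss {summary['train_loss']:.4f} over {summary['train_runtime']:.0f}s")
```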