{
  "best_global_step": 169336,
  "best_metric": 0.5241418199760344,
  "best_model_checkpoint": "modernbert-heritage-saliency/checkpoint-169336",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 254004,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005905418812302168,
      "grad_norm": 6.885669708251953,
      "learning_rate": 1.9960866758003812e-05,
      "loss": 0.778,
      "step": 500
    },
    {
      "epoch": 0.011810837624604336,
      "grad_norm": 7.079469203948975,
      "learning_rate": 1.992149729925513e-05,
      "loss": 0.663,
      "step": 1000
    },
    {
      "epoch": 0.017716256436906504,
      "grad_norm": 6.217828273773193,
      "learning_rate": 1.988212784050645e-05,
      "loss": 0.6988,
      "step": 1500
    },
    {
      "epoch": 0.023621675249208673,
      "grad_norm": 3.856297016143799,
      "learning_rate": 1.984275838175777e-05,
      "loss": 0.6711,
      "step": 2000
    },
    {
      "epoch": 0.029527094061510842,
      "grad_norm": 3.044447660446167,
      "learning_rate": 1.9803388923009087e-05,
      "loss": 0.6287,
      "step": 2500
    },
    {
      "epoch": 0.03543251287381301,
      "grad_norm": 2.8974101543426514,
      "learning_rate": 1.9764019464260406e-05,
      "loss": 0.6121,
      "step": 3000
    },
    {
      "epoch": 0.04133793168611518,
      "grad_norm": 11.309990882873535,
      "learning_rate": 1.972488622226422e-05,
      "loss": 0.6392,
      "step": 3500
    },
    {
      "epoch": 0.047243350498417346,
      "grad_norm": 3.9195759296417236,
      "learning_rate": 1.9685516763515536e-05,
      "loss": 0.6841,
      "step": 4000
    },
    {
      "epoch": 0.05314876931071952,
      "grad_norm": 0.9045548439025879,
      "learning_rate": 1.9646147304766856e-05,
      "loss": 0.6066,
      "step": 4500
    },
    {
      "epoch": 0.059054188123021684,
      "grad_norm": 2.262979030609131,
      "learning_rate": 1.9606777846018175e-05,
      "loss": 0.5993,
      "step": 5000
    },
    {
      "epoch": 0.06495960693532385,
      "grad_norm": 3.5913796424865723,
      "learning_rate": 1.9567408387269495e-05,
      "loss": 0.5916,
      "step": 5500
    },
    {
      "epoch": 0.07086502574762601,
      "grad_norm": 0.9315924048423767,
      "learning_rate": 1.952803892852081e-05,
      "loss": 0.6104,
      "step": 6000
    },
    {
      "epoch": 0.0767704445599282,
      "grad_norm": 3.631446123123169,
      "learning_rate": 1.948866946977213e-05,
      "loss": 0.5821,
      "step": 6500
    },
    {
      "epoch": 0.08267586337223036,
      "grad_norm": 3.5221781730651855,
      "learning_rate": 1.944930001102345e-05,
      "loss": 0.5843,
      "step": 7000
    },
    {
      "epoch": 0.08858128218453253,
      "grad_norm": 1.012416958808899,
      "learning_rate": 1.940993055227477e-05,
      "loss": 0.6374,
      "step": 7500
    },
    {
      "epoch": 0.09448670099683469,
      "grad_norm": 1.7101526260375977,
      "learning_rate": 1.9370718571361084e-05,
      "loss": 0.5932,
      "step": 8000
    },
    {
      "epoch": 0.10039211980913687,
      "grad_norm": 3.8092918395996094,
      "learning_rate": 1.93313491126124e-05,
      "loss": 0.5771,
      "step": 8500
    },
    {
      "epoch": 0.10629753862143904,
      "grad_norm": 1.8706109523773193,
      "learning_rate": 1.929197965386372e-05,
      "loss": 0.6001,
      "step": 9000
    },
    {
      "epoch": 0.1122029574337412,
      "grad_norm": 10.529691696166992,
      "learning_rate": 1.925261019511504e-05,
      "loss": 0.5962,
      "step": 9500
    },
    {
      "epoch": 0.11810837624604337,
      "grad_norm": 1.8136720657348633,
      "learning_rate": 1.921324073636636e-05,
      "loss": 0.6957,
      "step": 10000
    },
    {
      "epoch": 0.12401379505834553,
      "grad_norm": 0.7268733382225037,
      "learning_rate": 1.9173871277617678e-05,
      "loss": 0.7129,
      "step": 10500
    },
    {
      "epoch": 0.1299192138706477,
      "grad_norm": 3.032724142074585,
      "learning_rate": 1.9134501818868997e-05,
      "loss": 0.6435,
      "step": 11000
    },
    {
      "epoch": 0.13582463268294986,
      "grad_norm": 1.1200387477874756,
      "learning_rate": 1.9095132360120313e-05,
      "loss": 0.6221,
      "step": 11500
    },
    {
      "epoch": 0.14173005149525203,
      "grad_norm": 2.7086682319641113,
      "learning_rate": 1.9055762901371633e-05,
      "loss": 0.6087,
      "step": 12000
    },
    {
      "epoch": 0.14763547030755422,
      "grad_norm": 1.4301010370254517,
      "learning_rate": 1.9016393442622952e-05,
      "loss": 0.5724,
      "step": 12500
    },
    {
      "epoch": 0.1535408891198564,
      "grad_norm": 3.6675240993499756,
      "learning_rate": 1.8977023983874272e-05,
      "loss": 0.5667,
      "step": 13000
    },
    {
      "epoch": 0.15944630793215855,
      "grad_norm": 0.9690624475479126,
      "learning_rate": 1.893765452512559e-05,
      "loss": 0.5825,
      "step": 13500
    },
    {
      "epoch": 0.16535172674446072,
      "grad_norm": 21.52179527282715,
      "learning_rate": 1.8898285066376907e-05,
      "loss": 0.5963,
      "step": 14000
    },
    {
      "epoch": 0.17125714555676289,
      "grad_norm": 2.331463575363159,
      "learning_rate": 1.8858915607628227e-05,
      "loss": 0.5802,
      "step": 14500
    },
    {
      "epoch": 0.17716256436906505,
      "grad_norm": 11.624436378479004,
      "learning_rate": 1.8819546148879546e-05,
      "loss": 0.5931,
      "step": 15000
    },
    {
      "epoch": 0.18306798318136722,
      "grad_norm": 10.969076156616211,
      "learning_rate": 1.8780176690130866e-05,
      "loss": 0.5779,
      "step": 15500
    },
    {
      "epoch": 0.18897340199366938,
      "grad_norm": 17.681808471679688,
      "learning_rate": 1.874088597029968e-05,
      "loss": 0.5822,
      "step": 16000
    },
    {
      "epoch": 0.19487882080597155,
      "grad_norm": 3.5414836406707764,
      "learning_rate": 1.8701516511551e-05,
      "loss": 0.5749,
      "step": 16500
    },
    {
      "epoch": 0.20078423961827374,
      "grad_norm": 13.94709587097168,
      "learning_rate": 1.866214705280232e-05,
      "loss": 0.6799,
      "step": 17000
    },
    {
      "epoch": 0.2066896584305759,
      "grad_norm": 1.3140314817428589,
      "learning_rate": 1.8622856332971136e-05,
      "loss": 0.6906,
      "step": 17500
    },
    {
      "epoch": 0.21259507724287807,
      "grad_norm": 0.7483753561973572,
      "learning_rate": 1.858356561313995e-05,
      "loss": 0.7139,
      "step": 18000
    },
    {
      "epoch": 0.21850049605518024,
      "grad_norm": 6.807902812957764,
      "learning_rate": 1.854419615439127e-05,
      "loss": 0.6635,
      "step": 18500
    },
    {
      "epoch": 0.2244059148674824,
      "grad_norm": 21.158859252929688,
      "learning_rate": 1.850482669564259e-05,
      "loss": 0.7046,
      "step": 19000
    },
    {
      "epoch": 0.23031133367978457,
      "grad_norm": 0.47579389810562134,
      "learning_rate": 1.846545723689391e-05,
      "loss": 0.6757,
      "step": 19500
    },
    {
      "epoch": 0.23621675249208673,
      "grad_norm": 14.090885162353516,
      "learning_rate": 1.842608777814523e-05,
      "loss": 0.7157,
      "step": 20000
    },
    {
      "epoch": 0.2421221713043889,
      "grad_norm": 5.890013217926025,
      "learning_rate": 1.8386797058314044e-05,
      "loss": 0.6787,
      "step": 20500
    },
    {
      "epoch": 0.24802759011669107,
      "grad_norm": 17.978776931762695,
      "learning_rate": 1.834742759956536e-05,
      "loss": 0.6719,
      "step": 21000
    },
    {
      "epoch": 0.25393300892899323,
      "grad_norm": 19.15386962890625,
      "learning_rate": 1.8308058140816683e-05,
      "loss": 0.6684,
      "step": 21500
    },
    {
      "epoch": 0.2598384277412954,
      "grad_norm": 14.03354549407959,
      "learning_rate": 1.8268688682068e-05,
      "loss": 0.7036,
      "step": 22000
    },
    {
      "epoch": 0.26574384655359756,
      "grad_norm": 1.1214927434921265,
      "learning_rate": 1.8229397962236818e-05,
      "loss": 0.6637,
      "step": 22500
    },
    {
      "epoch": 0.27164926536589973,
      "grad_norm": 0.4743577241897583,
      "learning_rate": 1.8190028503488134e-05,
      "loss": 0.6966,
      "step": 23000
    },
    {
      "epoch": 0.2775546841782019,
      "grad_norm": 14.990901947021484,
      "learning_rate": 1.815073778365695e-05,
      "loss": 0.6642,
      "step": 23500
    },
    {
      "epoch": 0.28346010299050406,
      "grad_norm": 1.3651882410049438,
      "learning_rate": 1.8111368324908272e-05,
      "loss": 0.6331,
      "step": 24000
    },
    {
      "epoch": 0.2893655218028063,
      "grad_norm": 5.043566703796387,
      "learning_rate": 1.8071998866159592e-05,
      "loss": 0.657,
      "step": 24500
    },
    {
      "epoch": 0.29527094061510845,
      "grad_norm": 17.113506317138672,
      "learning_rate": 1.8032629407410908e-05,
      "loss": 0.6101,
      "step": 25000
    },
    {
      "epoch": 0.3011763594274106,
      "grad_norm": 12.973406791687012,
      "learning_rate": 1.7993259948662227e-05,
      "loss": 0.648,
      "step": 25500
    },
    {
      "epoch": 0.3070817782397128,
      "grad_norm": 1.021547555923462,
      "learning_rate": 1.7953890489913547e-05,
      "loss": 0.609,
      "step": 26000
    },
    {
      "epoch": 0.31298719705201494,
      "grad_norm": 1.919687271118164,
      "learning_rate": 1.7914521031164863e-05,
      "loss": 0.5473,
      "step": 26500
    },
    {
      "epoch": 0.3188926158643171,
      "grad_norm": 1.6815394163131714,
      "learning_rate": 1.7875151572416182e-05,
      "loss": 0.5526,
      "step": 27000
    },
    {
      "epoch": 0.3247980346766193,
      "grad_norm": 11.55897045135498,
      "learning_rate": 1.7835782113667502e-05,
      "loss": 0.5738,
      "step": 27500
    },
    {
      "epoch": 0.33070345348892144,
      "grad_norm": 5.713680267333984,
      "learning_rate": 1.779641265491882e-05,
      "loss": 0.5765,
      "step": 28000
    },
    {
      "epoch": 0.3366088723012236,
      "grad_norm": 6.03076696395874,
      "learning_rate": 1.775704319617014e-05,
      "loss": 0.622,
      "step": 28500
    },
    {
      "epoch": 0.34251429111352577,
      "grad_norm": 3.5472631454467773,
      "learning_rate": 1.771767373742146e-05,
      "loss": 0.5828,
      "step": 29000
    },
    {
      "epoch": 0.34841970992582794,
      "grad_norm": 5.590924263000488,
      "learning_rate": 1.7678383017590276e-05,
      "loss": 0.595,
      "step": 29500
    },
    {
      "epoch": 0.3543251287381301,
      "grad_norm": 7.295816421508789,
      "learning_rate": 1.7639013558841592e-05,
      "loss": 0.5718,
      "step": 30000
    },
    {
      "epoch": 0.36023054755043227,
      "grad_norm": 0.5159268379211426,
      "learning_rate": 1.7599644100092915e-05,
      "loss": 0.5478,
      "step": 30500
    },
    {
      "epoch": 0.36613596636273443,
      "grad_norm": 116.56455993652344,
      "learning_rate": 1.7560274641344234e-05,
      "loss": 0.5919,
      "step": 31000
    },
    {
      "epoch": 0.3720413851750366,
      "grad_norm": 13.48291301727295,
      "learning_rate": 1.752090518259555e-05,
      "loss": 0.5718,
      "step": 31500
    },
    {
      "epoch": 0.37794680398733876,
      "grad_norm": 11.179370880126953,
      "learning_rate": 1.748153572384687e-05,
      "loss": 0.6018,
      "step": 32000
    },
    {
      "epoch": 0.38385222279964093,
      "grad_norm": 13.461835861206055,
      "learning_rate": 1.744216626509819e-05,
      "loss": 0.5671,
      "step": 32500
    },
    {
      "epoch": 0.3897576416119431,
      "grad_norm": 3.035508394241333,
      "learning_rate": 1.7402875545267005e-05,
      "loss": 0.6002,
      "step": 33000
    },
    {
      "epoch": 0.39566306042424526,
      "grad_norm": 0.8200084567070007,
      "learning_rate": 1.7363506086518324e-05,
      "loss": 0.5577,
      "step": 33500
    },
    {
      "epoch": 0.4015684792365475,
      "grad_norm": 0.5664367079734802,
      "learning_rate": 1.7324136627769644e-05,
      "loss": 0.5948,
      "step": 34000
    },
    {
      "epoch": 0.40747389804884965,
      "grad_norm": 1.102321982383728,
      "learning_rate": 1.7284767169020963e-05,
      "loss": 0.5755,
      "step": 34500
    },
    {
      "epoch": 0.4133793168611518,
      "grad_norm": 0.8719478845596313,
      "learning_rate": 1.724539771027228e-05,
      "loss": 0.5754,
      "step": 35000
    },
    {
      "epoch": 0.419284735673454,
      "grad_norm": 4.414661407470703,
      "learning_rate": 1.7206106990441095e-05,
      "loss": 0.5768,
      "step": 35500
    },
    {
      "epoch": 0.42519015448575614,
      "grad_norm": 8.893802642822266,
      "learning_rate": 1.7166737531692417e-05,
      "loss": 0.558,
      "step": 36000
    },
    {
      "epoch": 0.4310955732980583,
      "grad_norm": 1.9466989040374756,
      "learning_rate": 1.7127368072943737e-05,
      "loss": 0.6029,
      "step": 36500
    },
    {
      "epoch": 0.4370009921103605,
      "grad_norm": 0.6918290853500366,
      "learning_rate": 1.7087998614195053e-05,
      "loss": 0.5743,
      "step": 37000
    },
    {
      "epoch": 0.44290641092266264,
      "grad_norm": 0.16082176566123962,
      "learning_rate": 1.7048629155446373e-05,
      "loss": 0.5468,
      "step": 37500
    },
    {
      "epoch": 0.4488118297349648,
      "grad_norm": 3.5709893703460693,
      "learning_rate": 1.7009259696697692e-05,
      "loss": 0.5936,
      "step": 38000
    },
    {
      "epoch": 0.454717248547267,
      "grad_norm": 4.856642723083496,
      "learning_rate": 1.6969890237949008e-05,
      "loss": 0.5805,
      "step": 38500
    },
    {
      "epoch": 0.46062266735956914,
      "grad_norm": 4.781003952026367,
      "learning_rate": 1.6930520779200328e-05,
      "loss": 0.5854,
      "step": 39000
    },
    {
      "epoch": 0.4665280861718713,
      "grad_norm": 19.200620651245117,
      "learning_rate": 1.6891151320451647e-05,
      "loss": 0.591,
      "step": 39500
    },
    {
      "epoch": 0.47243350498417347,
      "grad_norm": 0.6837007999420166,
      "learning_rate": 1.6851781861702967e-05,
      "loss": 0.5812,
      "step": 40000
    },
    {
      "epoch": 0.47833892379647563,
      "grad_norm": 0.7044087648391724,
      "learning_rate": 1.6812491141871782e-05,
      "loss": 0.5772,
      "step": 40500
    },
    {
      "epoch": 0.4842443426087778,
      "grad_norm": 8.194652557373047,
      "learning_rate": 1.67731216831231e-05,
      "loss": 0.5556,
      "step": 41000
    },
    {
      "epoch": 0.49014976142107997,
      "grad_norm": 4.029700756072998,
      "learning_rate": 1.673375222437442e-05,
      "loss": 0.5656,
      "step": 41500
    },
    {
      "epoch": 0.49605518023338213,
      "grad_norm": 2.9741246700286865,
      "learning_rate": 1.6694382765625737e-05,
      "loss": 0.6014,
      "step": 42000
    },
    {
      "epoch": 0.5019605990456844,
      "grad_norm": 3.5334889888763428,
      "learning_rate": 1.665501330687706e-05,
      "loss": 0.5648,
      "step": 42500
    },
    {
      "epoch": 0.5078660178579865,
      "grad_norm": 0.5247611403465271,
      "learning_rate": 1.6615722587045875e-05,
      "loss": 0.5546,
      "step": 43000
    },
    {
      "epoch": 0.5137714366702887,
      "grad_norm": 3.2951791286468506,
      "learning_rate": 1.6576353128297195e-05,
      "loss": 0.5764,
      "step": 43500
    },
    {
      "epoch": 0.5196768554825908,
      "grad_norm": 0.4771710932254791,
      "learning_rate": 1.653698366954851e-05,
      "loss": 0.5742,
      "step": 44000
    },
    {
      "epoch": 0.525582274294893,
      "grad_norm": 5.949660301208496,
      "learning_rate": 1.649761421079983e-05,
      "loss": 0.5664,
      "step": 44500
    },
    {
      "epoch": 0.5314876931071951,
      "grad_norm": 1.1648470163345337,
      "learning_rate": 1.645824475205115e-05,
      "loss": 0.5612,
      "step": 45000
    },
    {
      "epoch": 0.5373931119194973,
      "grad_norm": 4.647109031677246,
      "learning_rate": 1.641887529330247e-05,
      "loss": 0.5693,
      "step": 45500
    },
    {
      "epoch": 0.5432985307317995,
      "grad_norm": 3.4232962131500244,
      "learning_rate": 1.6379663312388784e-05,
      "loss": 0.5884,
      "step": 46000
    },
    {
      "epoch": 0.5492039495441017,
      "grad_norm": 0.6715309619903564,
      "learning_rate": 1.63402938536401e-05,
      "loss": 0.5734,
      "step": 46500
    },
    {
      "epoch": 0.5551093683564038,
      "grad_norm": 10.699589729309082,
      "learning_rate": 1.630092439489142e-05,
      "loss": 0.5842,
      "step": 47000
    },
    {
      "epoch": 0.561014787168706,
      "grad_norm": 4.567357540130615,
      "learning_rate": 1.626155493614274e-05,
      "loss": 0.5356,
      "step": 47500
    },
    {
      "epoch": 0.5669202059810081,
      "grad_norm": 0.851794421672821,
      "learning_rate": 1.622218547739406e-05,
      "loss": 0.6188,
      "step": 48000
    },
    {
      "epoch": 0.5728256247933103,
      "grad_norm": 0.710367739200592,
      "learning_rate": 1.6182816018645378e-05,
      "loss": 0.586,
      "step": 48500
    },
    {
      "epoch": 0.5787310436056126,
      "grad_norm": 16.63396644592285,
      "learning_rate": 1.6143446559896697e-05,
      "loss": 0.5638,
      "step": 49000
    },
    {
      "epoch": 0.5846364624179147,
      "grad_norm": 14.063051223754883,
      "learning_rate": 1.6104155840065513e-05,
      "loss": 0.5631,
      "step": 49500
    },
    {
      "epoch": 0.5905418812302169,
      "grad_norm": 2.5714404582977295,
      "learning_rate": 1.606478638131683e-05,
      "loss": 0.5947,
      "step": 50000
    },
    {
      "epoch": 0.596447300042519,
      "grad_norm": 2.1997830867767334,
      "learning_rate": 1.6025416922568148e-05,
      "loss": 0.5288,
      "step": 50500
    },
    {
      "epoch": 0.6023527188548212,
      "grad_norm": 0.6119571328163147,
      "learning_rate": 1.598604746381947e-05,
      "loss": 0.6011,
      "step": 51000
    },
    {
      "epoch": 0.6082581376671233,
      "grad_norm": 13.032819747924805,
      "learning_rate": 1.5946678005070787e-05,
      "loss": 0.5671,
      "step": 51500
    },
    {
      "epoch": 0.6141635564794256,
      "grad_norm": 5.521139621734619,
      "learning_rate": 1.5907308546322107e-05,
      "loss": 0.5946,
      "step": 52000
    },
    {
      "epoch": 0.6200689752917277,
      "grad_norm": 29.162424087524414,
      "learning_rate": 1.5867939087573426e-05,
      "loss": 0.5604,
      "step": 52500
    },
    {
      "epoch": 0.6259743941040299,
      "grad_norm": 4.117544651031494,
      "learning_rate": 1.5828569628824742e-05,
      "loss": 0.5436,
      "step": 53000
    },
    {
      "epoch": 0.631879812916332,
      "grad_norm": 0.4661092758178711,
      "learning_rate": 1.578927890899356e-05,
      "loss": 0.5801,
      "step": 53500
    },
    {
      "epoch": 0.6377852317286342,
      "grad_norm": 5.056973457336426,
      "learning_rate": 1.574990945024488e-05,
      "loss": 0.5693,
      "step": 54000
    },
    {
      "epoch": 0.6436906505409363,
      "grad_norm": 7.182854652404785,
      "learning_rate": 1.57105399914962e-05,
      "loss": 0.5936,
      "step": 54500
    },
    {
      "epoch": 0.6495960693532385,
      "grad_norm": 6.386303901672363,
      "learning_rate": 1.5671170532747516e-05,
      "loss": 0.5751,
      "step": 55000
    },
    {
      "epoch": 0.6555014881655407,
      "grad_norm": 4.122045040130615,
      "learning_rate": 1.5631801073998836e-05,
      "loss": 0.5348,
      "step": 55500
    },
    {
      "epoch": 0.6614069069778429,
      "grad_norm": 0.5306248068809509,
      "learning_rate": 1.559251035416765e-05,
      "loss": 0.5793,
      "step": 56000
    },
    {
      "epoch": 0.667312325790145,
      "grad_norm": 5.596012115478516,
      "learning_rate": 1.555314089541897e-05,
      "loss": 0.5481,
      "step": 56500
    },
    {
      "epoch": 0.6732177446024472,
      "grad_norm": 0.6556316614151001,
      "learning_rate": 1.551377143667029e-05,
      "loss": 0.5549,
      "step": 57000
    },
    {
      "epoch": 0.6791231634147493,
      "grad_norm": 1.0770095586776733,
      "learning_rate": 1.547440197792161e-05,
      "loss": 0.5888,
      "step": 57500
    },
    {
      "epoch": 0.6850285822270515,
      "grad_norm": 0.6506383419036865,
      "learning_rate": 1.543503251917293e-05,
      "loss": 0.5782,
      "step": 58000
    },
    {
      "epoch": 0.6909340010393538,
      "grad_norm": 5.457004547119141,
      "learning_rate": 1.5395663060424245e-05,
      "loss": 0.5687,
      "step": 58500
    },
    {
      "epoch": 0.6968394198516559,
      "grad_norm": 1.7497150897979736,
      "learning_rate": 1.5356293601675564e-05,
      "loss": 0.5685,
      "step": 59000
    },
    {
      "epoch": 0.7027448386639581,
      "grad_norm": 2.551649570465088,
      "learning_rate": 1.5316924142926884e-05,
      "loss": 0.5457,
      "step": 59500
    },
    {
      "epoch": 0.7086502574762602,
      "grad_norm": 1.2479901313781738,
      "learning_rate": 1.5277554684178203e-05,
      "loss": 0.5814,
      "step": 60000
    },
    {
      "epoch": 0.7145556762885624,
      "grad_norm": 2.57185697555542,
      "learning_rate": 1.5238185225429523e-05,
      "loss": 0.556,
      "step": 60500
    },
    {
      "epoch": 0.7204610951008645,
      "grad_norm": 3.0134148597717285,
      "learning_rate": 1.5198894505598338e-05,
      "loss": 0.5646,
      "step": 61000
    },
    {
      "epoch": 0.7263665139131668,
      "grad_norm": 3.8693532943725586,
      "learning_rate": 1.5159525046849656e-05,
      "loss": 0.5655,
      "step": 61500
    },
    {
      "epoch": 0.7322719327254689,
      "grad_norm": 56.192054748535156,
      "learning_rate": 1.5120234327018473e-05,
      "loss": 0.5617,
      "step": 62000
    },
    {
      "epoch": 0.7381773515377711,
      "grad_norm": 1.0195658206939697,
      "learning_rate": 1.5080864868269793e-05,
      "loss": 0.5524,
      "step": 62500
    },
    {
      "epoch": 0.7440827703500732,
      "grad_norm": 2.2935259342193604,
      "learning_rate": 1.5041495409521112e-05,
      "loss": 0.5626,
      "step": 63000
    },
    {
      "epoch": 0.7499881891623754,
      "grad_norm": 46.69443893432617,
      "learning_rate": 1.500212595077243e-05,
      "loss": 0.5839,
      "step": 63500
    },
    {
      "epoch": 0.7558936079746775,
      "grad_norm": 0.6499876976013184,
      "learning_rate": 1.496275649202375e-05,
      "loss": 0.5501,
      "step": 64000
    },
    {
      "epoch": 0.7617990267869797,
      "grad_norm": 6.221270561218262,
      "learning_rate": 1.4923387033275067e-05,
      "loss": 0.5691,
      "step": 64500
    },
    {
      "epoch": 0.7677044455992819,
      "grad_norm": 4.556798458099365,
      "learning_rate": 1.4884017574526387e-05,
      "loss": 0.5731,
      "step": 65000
    },
    {
      "epoch": 0.7736098644115841,
      "grad_norm": 3.3265926837921143,
      "learning_rate": 1.4844648115777704e-05,
      "loss": 0.5697,
      "step": 65500
    },
    {
      "epoch": 0.7795152832238862,
      "grad_norm": 4.640722274780273,
      "learning_rate": 1.4805278657029026e-05,
      "loss": 0.5832,
      "step": 66000
    },
    {
      "epoch": 0.7854207020361884,
      "grad_norm": 4.069241046905518,
      "learning_rate": 1.4765909198280343e-05,
      "loss": 0.5997,
      "step": 66500
    },
    {
      "epoch": 0.7913261208484905,
      "grad_norm": 4.073843479156494,
      "learning_rate": 1.4726539739531663e-05,
      "loss": 0.5475,
      "step": 67000
    },
    {
      "epoch": 0.7972315396607927,
      "grad_norm": 3.5267622470855713,
      "learning_rate": 1.468717028078298e-05,
      "loss": 0.551,
      "step": 67500
    },
    {
      "epoch": 0.803136958473095,
      "grad_norm": 3.477355480194092,
      "learning_rate": 1.46478008220343e-05,
      "loss": 0.5237,
      "step": 68000
    },
    {
      "epoch": 0.8090423772853971,
      "grad_norm": 2.0641283988952637,
      "learning_rate": 1.4608431363285618e-05,
      "loss": 0.5747,
      "step": 68500
    },
    {
      "epoch": 0.8149477960976993,
      "grad_norm": 9.778463363647461,
      "learning_rate": 1.4569140643454435e-05,
      "loss": 0.5741,
      "step": 69000
    },
    {
      "epoch": 0.8208532149100014,
      "grad_norm": 10.189579010009766,
      "learning_rate": 1.4529849923623252e-05,
      "loss": 0.5655,
      "step": 69500
    },
    {
      "epoch": 0.8267586337223036,
      "grad_norm": 7.845795631408691,
      "learning_rate": 1.449048046487457e-05,
      "loss": 0.558,
      "step": 70000
    },
    {
      "epoch": 0.8326640525346057,
      "grad_norm": 4.319666862487793,
      "learning_rate": 1.445111100612589e-05,
      "loss": 0.5814,
      "step": 70500
    },
    {
      "epoch": 0.838569471346908,
      "grad_norm": 7.940633296966553,
      "learning_rate": 1.4411741547377207e-05,
      "loss": 0.5671,
      "step": 71000
    },
    {
      "epoch": 0.8444748901592101,
      "grad_norm": 2.4097514152526855,
      "learning_rate": 1.4372372088628525e-05,
      "loss": 0.5851,
      "step": 71500
    },
    {
      "epoch": 0.8503803089715123,
      "grad_norm": 4.664578437805176,
      "learning_rate": 1.4333081368797344e-05,
      "loss": 0.5454,
      "step": 72000
    },
    {
      "epoch": 0.8562857277838144,
      "grad_norm": 3.250767230987549,
      "learning_rate": 1.4293711910048661e-05,
      "loss": 0.5581,
      "step": 72500
    },
    {
      "epoch": 0.8621911465961166,
      "grad_norm": 11.224815368652344,
      "learning_rate": 1.4254342451299981e-05,
      "loss": 0.5398,
      "step": 73000
    },
    {
      "epoch": 0.8680965654084187,
      "grad_norm": 0.5496272444725037,
      "learning_rate": 1.4214972992551299e-05,
      "loss": 0.5797,
      "step": 73500
    },
    {
      "epoch": 0.874001984220721,
      "grad_norm": 3.5080580711364746,
      "learning_rate": 1.4175603533802618e-05,
      "loss": 0.5421,
      "step": 74000
    },
    {
      "epoch": 0.8799074030330231,
      "grad_norm": 0.6545143723487854,
      "learning_rate": 1.4136312813971435e-05,
      "loss": 0.5369,
      "step": 74500
    },
    {
      "epoch": 0.8858128218453253,
      "grad_norm": 0.41881951689720154,
      "learning_rate": 1.4096943355222755e-05,
      "loss": 0.5392,
      "step": 75000
    },
    {
      "epoch": 0.8917182406576274,
      "grad_norm": 0.576518177986145,
      "learning_rate": 1.4057573896474073e-05,
      "loss": 0.5325,
      "step": 75500
    },
    {
      "epoch": 0.8976236594699296,
      "grad_norm": 3.194174289703369,
      "learning_rate": 1.4018204437725392e-05,
      "loss": 0.6139,
      "step": 76000
    },
    {
      "epoch": 0.9035290782822317,
      "grad_norm": 5.220823287963867,
      "learning_rate": 1.3978913717894207e-05,
      "loss": 0.5577,
      "step": 76500
    },
    {
      "epoch": 0.909434497094534,
      "grad_norm": 11.443092346191406,
      "learning_rate": 1.3939544259145525e-05,
      "loss": 0.5547,
      "step": 77000
    },
    {
      "epoch": 0.9153399159068362,
      "grad_norm": 0.620442807674408,
      "learning_rate": 1.3900253539314344e-05,
      "loss": 0.5679,
      "step": 77500
    },
    {
      "epoch": 0.9212453347191383,
      "grad_norm": 0.49119341373443604,
      "learning_rate": 1.3860884080565662e-05,
      "loss": 0.5166,
      "step": 78000
    },
    {
      "epoch": 0.9271507535314405,
      "grad_norm": 15.640247344970703,
      "learning_rate": 1.3821514621816981e-05,
      "loss": 0.58,
      "step": 78500
    },
    {
      "epoch": 0.9330561723437426,
      "grad_norm": 2.5728659629821777,
      "learning_rate": 1.3782145163068299e-05,
      "loss": 0.5688,
      "step": 79000
    },
    {
      "epoch": 0.9389615911560448,
      "grad_norm": 0.5715045928955078,
      "learning_rate": 1.3742775704319617e-05,
      "loss": 0.5345,
      "step": 79500
    },
    {
      "epoch": 0.9448670099683469,
      "grad_norm": 0.5369954109191895,
      "learning_rate": 1.3703406245570936e-05,
      "loss": 0.5259,
      "step": 80000
    },
    {
      "epoch": 0.9507724287806492,
      "grad_norm": 14.094406127929688,
      "learning_rate": 1.3664036786822257e-05,
      "loss": 0.5841,
      "step": 80500
    },
    {
      "epoch": 0.9566778475929513,
      "grad_norm": 45.60070037841797,
      "learning_rate": 1.3624667328073575e-05,
      "loss": 0.524,
      "step": 81000
    },
    {
      "epoch": 0.9625832664052535,
      "grad_norm": 0.13448235392570496,
      "learning_rate": 1.3585297869324893e-05,
      "loss": 0.5377,
      "step": 81500
    },
    {
      "epoch": 0.9684886852175556,
      "grad_norm": 41.70237731933594,
      "learning_rate": 1.3545928410576212e-05,
      "loss": 0.562,
      "step": 82000
    },
    {
      "epoch": 0.9743941040298578,
      "grad_norm": 4.349485874176025,
      "learning_rate": 1.350655895182753e-05,
      "loss": 0.5193,
      "step": 82500
    },
    {
      "epoch": 0.9802995228421599,
      "grad_norm": 0.3272392749786377,
      "learning_rate": 1.346718949307885e-05,
      "loss": 0.5396,
      "step": 83000
    },
    {
      "epoch": 0.9862049416544622,
      "grad_norm": 0.6910300254821777,
      "learning_rate": 1.3427820034330171e-05,
      "loss": 0.5163,
      "step": 83500
    },
    {
      "epoch": 0.9921103604667643,
      "grad_norm": 1.2478801012039185,
      "learning_rate": 1.3388450575581489e-05,
      "loss": 0.5474,
      "step": 84000
    },
    {
      "epoch": 0.9980157792790665,
      "grad_norm": 26.1578369140625,
      "learning_rate": 1.3349159855750304e-05,
      "loss": 0.5688,
      "step": 84500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.492972828269308,
      "eval_f1": 0.5198082616664317,
      "eval_loss": 0.5405558347702026,
      "eval_roc_auc": 0.6791354970679471,
      "eval_runtime": 111.677,
      "eval_samples_per_second": 66.898,
      "eval_steps_per_second": 66.898,
      "step": 84668
    },
    {
      "epoch": 1.0039211980913687,
      "grad_norm": 16.267274856567383,
      "learning_rate": 1.3309790397001624e-05,
      "loss": 0.5494,
      "step": 85000
    },
    {
      "epoch": 1.0098266169036707,
      "grad_norm": 3.8913004398345947,
      "learning_rate": 1.3270420938252941e-05,
      "loss": 0.5073,
      "step": 85500
    },
    {
      "epoch": 1.015732035715973,
      "grad_norm": 4.086480617523193,
      "learning_rate": 1.323105147950426e-05,
      "loss": 0.5299,
      "step": 86000
    },
    {
      "epoch": 1.0216374545282751,
      "grad_norm": 0.4327312707901001,
      "learning_rate": 1.3191760759673078e-05,
      "loss": 0.5854,
      "step": 86500
    },
    {
      "epoch": 1.0275428733405774,
      "grad_norm": 4.263016223907471,
      "learning_rate": 1.3152391300924396e-05,
      "loss": 0.5211,
      "step": 87000
    },
    {
      "epoch": 1.0334482921528796,
      "grad_norm": 18.525869369506836,
      "learning_rate": 1.3113021842175715e-05,
      "loss": 0.5393,
      "step": 87500
    },
    {
      "epoch": 1.0393537109651816,
      "grad_norm": 15.347341537475586,
      "learning_rate": 1.3073652383427033e-05,
      "loss": 0.5444,
      "step": 88000
    },
    {
      "epoch": 1.0452591297774838,
      "grad_norm": 0.81917804479599,
      "learning_rate": 1.3034282924678352e-05,
      "loss": 0.5622,
      "step": 88500
    },
    {
      "epoch": 1.051164548589786,
      "grad_norm": 0.8243115544319153,
      "learning_rate": 1.299491346592967e-05,
      "loss": 0.5485,
      "step": 89000
    },
    {
      "epoch": 1.0570699674020883,
      "grad_norm": 0.9479914903640747,
      "learning_rate": 1.2955544007180991e-05,
      "loss": 0.5653,
      "step": 89500
    },
    {
      "epoch": 1.0629753862143902,
      "grad_norm": 11.97670841217041,
      "learning_rate": 1.2916253287349807e-05,
      "loss": 0.5698,
      "step": 90000
    },
    {
      "epoch": 1.0688808050266925,
      "grad_norm": 6.69007682800293,
      "learning_rate": 1.2876883828601126e-05,
      "loss": 0.5151,
      "step": 90500
    },
    {
      "epoch": 1.0747862238389947,
      "grad_norm": 17.800846099853516,
      "learning_rate": 1.2837514369852444e-05,
      "loss": 0.5431,
      "step": 91000
    },
    {
      "epoch": 1.080691642651297,
      "grad_norm": 6.926564693450928,
      "learning_rate": 1.2798144911103762e-05,
      "loss": 0.5562,
      "step": 91500
    },
    {
      "epoch": 1.086597061463599,
      "grad_norm": 3.1776883602142334,
      "learning_rate": 1.275885419127258e-05,
      "loss": 0.5498,
      "step": 92000
    },
    {
      "epoch": 1.0925024802759011,
      "grad_norm": 25.895919799804688,
      "learning_rate": 1.2719484732523898e-05,
      "loss": 0.5481,
      "step": 92500
    },
    {
      "epoch": 1.0984078990882034,
      "grad_norm": 0.28274840116500854,
      "learning_rate": 1.2680194012692715e-05,
      "loss": 0.5394,
      "step": 93000
    },
    {
      "epoch": 1.1043133179005056,
      "grad_norm": 0.2646944224834442,
      "learning_rate": 1.2640824553944033e-05,
      "loss": 0.5297,
      "step": 93500
    },
    {
      "epoch": 1.1102187367128076,
      "grad_norm": 0.24462255835533142,
      "learning_rate": 1.2601533834112849e-05,
      "loss": 0.5158,
      "step": 94000
    },
    {
      "epoch": 1.1161241555251098,
      "grad_norm": 10.8624267578125,
      "learning_rate": 1.2562164375364168e-05,
      "loss": 0.5647,
      "step": 94500
    },
    {
      "epoch": 1.122029574337412,
      "grad_norm": 4.669233798980713,
      "learning_rate": 1.2522794916615488e-05,
      "loss": 0.5599,
      "step": 95000
    },
    {
      "epoch": 1.1279349931497142,
      "grad_norm": 7.873256206512451,
      "learning_rate": 1.2483425457866807e-05,
      "loss": 0.555,
      "step": 95500
    },
    {
      "epoch": 1.1338404119620162,
      "grad_norm": 0.3633436858654022,
      "learning_rate": 1.2444055999118125e-05,
      "loss": 0.5347,
      "step": 96000
    },
    {
      "epoch": 1.1397458307743185,
      "grad_norm": 63.884368896484375,
      "learning_rate": 1.2404686540369444e-05,
      "loss": 0.5518,
      "step": 96500
    },
    {
      "epoch": 1.1456512495866207,
      "grad_norm": 9.837042808532715,
      "learning_rate": 1.2365317081620762e-05,
      "loss": 0.5211,
      "step": 97000
    },
    {
      "epoch": 1.151556668398923,
      "grad_norm": 0.36800965666770935,
      "learning_rate": 1.2325947622872082e-05,
      "loss": 0.5463,
      "step": 97500
    },
    {
      "epoch": 1.1574620872112251,
      "grad_norm": 11.544560432434082,
      "learning_rate": 1.2286578164123401e-05,
      "loss": 0.54,
      "step": 98000
    },
    {
      "epoch": 1.1633675060235271,
      "grad_norm": 3.7216811180114746,
      "learning_rate": 1.224720870537472e-05,
      "loss": 0.5314,
      "step": 98500
    },
    {
      "epoch": 1.1692729248358293,
      "grad_norm": 12.869535446166992,
      "learning_rate": 1.2207917985543536e-05,
      "loss": 0.5655,
      "step": 99000
    },
    {
      "epoch": 1.1751783436481316,
      "grad_norm": 1.4455419778823853,
      "learning_rate": 1.2168548526794854e-05,
      "loss": 0.5208,
      "step": 99500
    },
    {
      "epoch": 1.1810837624604338,
      "grad_norm": 0.6675042510032654,
      "learning_rate": 1.2129179068046173e-05,
      "loss": 0.5584,
      "step": 100000
    },
    {
      "epoch": 1.1869891812727358,
      "grad_norm": 0.5745788812637329,
      "learning_rate": 1.2089888348214989e-05,
      "loss": 0.5369,
      "step": 100500
    },
    {
      "epoch": 1.192894600085038,
      "grad_norm": 0.47152823209762573,
      "learning_rate": 1.205051888946631e-05,
      "loss": 0.5601,
      "step": 101000
    },
    {
      "epoch": 1.1988000188973402,
      "grad_norm": 0.3635103702545166,
      "learning_rate": 1.2011149430717628e-05,
      "loss": 0.5322,
      "step": 101500
    },
    {
      "epoch": 1.2047054377096424,
      "grad_norm": 6.082094192504883,
      "learning_rate": 1.1971779971968947e-05,
      "loss": 0.5548,
      "step": 102000
    },
    {
      "epoch": 1.2106108565219444,
      "grad_norm": 5.332359313964844,
      "learning_rate": 1.1932489252137762e-05,
      "loss": 0.5641,
      "step": 102500
    },
    {
      "epoch": 1.2165162753342467,
      "grad_norm": 0.7001695036888123,
      "learning_rate": 1.189311979338908e-05,
      "loss": 0.55,
      "step": 103000
    },
    {
      "epoch": 1.2224216941465489,
      "grad_norm": 0.35426071286201477,
      "learning_rate": 1.18537503346404e-05,
      "loss": 0.544,
      "step": 103500
    },
    {
      "epoch": 1.228327112958851,
      "grad_norm": 10.50427532196045,
      "learning_rate": 1.181438087589172e-05,
      "loss": 0.567,
      "step": 104000
    },
    {
      "epoch": 1.2342325317711533,
      "grad_norm": 0.4111456871032715,
      "learning_rate": 1.1775011417143039e-05,
      "loss": 0.5346,
      "step": 104500
    },
    {
      "epoch": 1.2401379505834553,
      "grad_norm": 3.6128838062286377,
      "learning_rate": 1.1735720697311854e-05,
      "loss": 0.5173,
      "step": 105000
    },
    {
      "epoch": 1.2460433693957575,
      "grad_norm": 0.2788158059120178,
      "learning_rate": 1.1696351238563173e-05,
      "loss": 0.5486,
      "step": 105500
    },
    {
      "epoch": 1.2519487882080598,
      "grad_norm": 2.4274394512176514,
      "learning_rate": 1.1656981779814491e-05,
      "loss": 0.573,
      "step": 106000
    },
    {
      "epoch": 1.2578542070203618,
      "grad_norm": 26.823347091674805,
      "learning_rate": 1.1617612321065809e-05,
      "loss": 0.5389,
      "step": 106500
    },
    {
      "epoch": 1.263759625832664,
      "grad_norm": 5.76626443862915,
      "learning_rate": 1.157824286231713e-05,
      "loss": 0.5697,
      "step": 107000
    },
    {
      "epoch": 1.2696650446449662,
      "grad_norm": 6.441776752471924,
      "learning_rate": 1.153887340356845e-05,
      "loss": 0.5172,
      "step": 107500
    },
    {
      "epoch": 1.2755704634572684,
      "grad_norm": 4.22221040725708,
      "learning_rate": 1.1499503944819767e-05,
      "loss": 0.5803,
      "step": 108000
    },
    {
      "epoch": 1.2814758822695707,
      "grad_norm": 0.2600265145301819,
      "learning_rate": 1.1460213224988583e-05,
      "loss": 0.5235,
      "step": 108500
    },
    {
      "epoch": 1.2873813010818727,
      "grad_norm": 3.5664291381835938,
      "learning_rate": 1.1420843766239902e-05,
      "loss": 0.5762,
      "step": 109000
    },
    {
      "epoch": 1.2932867198941749,
      "grad_norm": 4.590908050537109,
      "learning_rate": 1.1381474307491222e-05,
      "loss": 0.554,
      "step": 109500
    },
    {
      "epoch": 1.299192138706477,
      "grad_norm": 0.40613120794296265,
      "learning_rate": 1.1342183587660039e-05,
      "loss": 0.5396,
      "step": 110000
    },
    {
      "epoch": 1.305097557518779,
      "grad_norm": 0.6809604167938232,
      "learning_rate": 1.1302814128911357e-05,
      "loss": 0.5567,
      "step": 110500
    },
    {
      "epoch": 1.3110029763310815,
      "grad_norm": 5.76853084564209,
      "learning_rate": 1.1263444670162676e-05,
      "loss": 0.5376,
      "step": 111000
    },
    {
      "epoch": 1.3169083951433835,
      "grad_norm": 8.591459274291992,
      "learning_rate": 1.1224075211413994e-05,
      "loss": 0.5463,
      "step": 111500
    },
    {
      "epoch": 1.3228138139556858,
      "grad_norm": 9.847731590270996,
      "learning_rate": 1.1184705752665312e-05,
      "loss": 0.5211,
      "step": 112000
    },
    {
      "epoch": 1.328719232767988,
      "grad_norm": 2.084632158279419,
      "learning_rate": 1.1145336293916633e-05,
      "loss": 0.5687,
      "step": 112500
    },
    {
      "epoch": 1.33462465158029,
      "grad_norm": 0.2582547962665558,
      "learning_rate": 1.1105966835167952e-05,
      "loss": 0.5258,
      "step": 113000
    },
    {
      "epoch": 1.3405300703925922,
      "grad_norm": 4.441883563995361,
      "learning_rate": 1.106659737641927e-05,
      "loss": 0.5789,
      "step": 113500
    },
    {
      "epoch": 1.3464354892048944,
      "grad_norm": 3.4951181411743164,
      "learning_rate": 1.1027227917670588e-05,
      "loss": 0.5395,
      "step": 114000
    },
    {
      "epoch": 1.3523409080171966,
      "grad_norm": 7.546202659606934,
      "learning_rate": 1.0987858458921907e-05,
      "loss": 0.5546,
      "step": 114500
    },
    {
      "epoch": 1.3582463268294989,
      "grad_norm": 27.956619262695312,
      "learning_rate": 1.0948489000173225e-05,
      "loss": 0.5503,
      "step": 115000
    },
    {
      "epoch": 1.3641517456418009,
      "grad_norm": 14.313876152038574,
      "learning_rate": 1.0909119541424545e-05,
      "loss": 0.5552,
      "step": 115500
    },
    {
      "epoch": 1.370057164454103,
      "grad_norm": 3.3627312183380127,
      "learning_rate": 1.0869750082675866e-05,
      "loss": 0.562,
      "step": 116000
    },
    {
      "epoch": 1.3759625832664053,
      "grad_norm": 4.319688320159912,
      "learning_rate": 1.0830380623927184e-05,
      "loss": 0.513,
      "step": 116500
    },
    {
      "epoch": 1.3818680020787073,
      "grad_norm": 0.45721712708473206,
      "learning_rate": 1.0791089904095999e-05,
      "loss": 0.5349,
      "step": 117000
    },
    {
      "epoch": 1.3877734208910095,
      "grad_norm": 0.3523353934288025,
      "learning_rate": 1.0751720445347319e-05,
      "loss": 0.5367,
      "step": 117500
    },
    {
      "epoch": 1.3936788397033117,
      "grad_norm": 5.773875713348389,
      "learning_rate": 1.0712350986598636e-05,
      "loss": 0.5093,
      "step": 118000
    },
    {
      "epoch": 1.399584258515614,
      "grad_norm": 0.3608294725418091,
      "learning_rate": 1.0672981527849954e-05,
      "loss": 0.5124,
      "step": 118500
    },
    {
      "epoch": 1.4054896773279162,
      "grad_norm": 0.5569049119949341,
      "learning_rate": 1.0633612069101275e-05,
      "loss": 0.5796,
      "step": 119000
    },
    {
      "epoch": 1.4113950961402182,
      "grad_norm": 0.26475992798805237,
      "learning_rate": 1.0594242610352595e-05,
      "loss": 0.5263,
      "step": 119500
    },
    {
      "epoch": 1.4173005149525204,
      "grad_norm": 5.134099960327148,
      "learning_rate": 1.0555030629438908e-05,
      "loss": 0.498,
      "step": 120000
    },
    {
      "epoch": 1.4232059337648226,
      "grad_norm": 4.049677848815918,
      "learning_rate": 1.0515661170690225e-05,
      "loss": 0.5565,
      "step": 120500
    },
    {
      "epoch": 1.4291113525771248,
      "grad_norm": 4.562811851501465,
      "learning_rate": 1.0476291711941545e-05,
      "loss": 0.5173,
      "step": 121000
    },
    {
      "epoch": 1.435016771389427,
      "grad_norm": 0.3501911759376526,
      "learning_rate": 1.0436922253192864e-05,
      "loss": 0.5658,
      "step": 121500
    },
    {
      "epoch": 1.440922190201729,
      "grad_norm": 0.5130248665809631,
      "learning_rate": 1.0397552794444184e-05,
      "loss": 0.5157,
      "step": 122000
    },
    {
      "epoch": 1.4468276090140313,
      "grad_norm": 3.659543037414551,
      "learning_rate": 1.0358183335695502e-05,
      "loss": 0.5515,
      "step": 122500
    },
    {
      "epoch": 1.4527330278263335,
      "grad_norm": 0.807367742061615,
      "learning_rate": 1.0318892615864317e-05,
      "loss": 0.5411,
      "step": 123000
    },
    {
      "epoch": 1.4586384466386355,
      "grad_norm": 4.063622951507568,
      "learning_rate": 1.0279523157115637e-05,
      "loss": 0.5604,
      "step": 123500
    },
    {
      "epoch": 1.4645438654509377,
      "grad_norm": 0.5569178462028503,
      "learning_rate": 1.0240153698366954e-05,
      "loss": 0.546,
      "step": 124000
    },
    {
      "epoch": 1.47044928426324,
      "grad_norm": 0.4748586416244507,
      "learning_rate": 1.0200784239618276e-05,
      "loss": 0.5357,
      "step": 124500
    },
    {
      "epoch": 1.4763547030755422,
      "grad_norm": 0.5214936137199402,
      "learning_rate": 1.0161414780869593e-05,
      "loss": 0.5815,
      "step": 125000
    },
    {
      "epoch": 1.4822601218878444,
      "grad_norm": 0.48534107208251953,
      "learning_rate": 1.0122045322120913e-05,
      "loss": 0.5601,
      "step": 125500
    },
    {
      "epoch": 1.4881655407001464,
      "grad_norm": 4.443326473236084,
      "learning_rate": 1.008267586337223e-05,
      "loss": 0.5422,
      "step": 126000
    },
    {
      "epoch": 1.4940709595124486,
      "grad_norm": 0.4154174327850342,
      "learning_rate": 1.004330640462355e-05,
      "loss": 0.532,
      "step": 126500
    },
    {
      "epoch": 1.4999763783247508,
      "grad_norm": 0.4233492612838745,
      "learning_rate": 1.0003936945874868e-05,
      "loss": 0.5632,
      "step": 127000
    },
    {
      "epoch": 1.5058817971370528,
      "grad_norm": 4.048283576965332,
      "learning_rate": 9.964646226043685e-06,
      "loss": 0.5167,
      "step": 127500
    },
    {
      "epoch": 1.5117872159493553,
      "grad_norm": 18.454174041748047,
      "learning_rate": 9.925355506212502e-06,
      "loss": 0.5577,
      "step": 128000
    },
    {
      "epoch": 1.5176926347616573,
      "grad_norm": 6.030921459197998,
      "learning_rate": 9.88598604746382e-06,
      "loss": 0.5741,
      "step": 128500
    },
    {
      "epoch": 1.5235980535739595,
      "grad_norm": 0.2656106948852539,
      "learning_rate": 9.84661658871514e-06,
      "loss": 0.5258,
      "step": 129000
    },
    {
      "epoch": 1.5295034723862617,
      "grad_norm": 13.179415702819824,
      "learning_rate": 9.807247129966459e-06,
      "loss": 0.5633,
      "step": 129500
    },
    {
      "epoch": 1.5354088911985637,
      "grad_norm": 9.94273567199707,
      "learning_rate": 9.767877671217777e-06,
      "loss": 0.5204,
      "step": 130000
    },
    {
      "epoch": 1.541314310010866,
      "grad_norm": 0.41358762979507446,
      "learning_rate": 9.728586951386592e-06,
      "loss": 0.5533,
      "step": 130500
    },
    {
      "epoch": 1.5472197288231682,
      "grad_norm": 10.123603820800781,
      "learning_rate": 9.689217492637913e-06,
      "loss": 0.5438,
      "step": 131000
    },
    {
      "epoch": 1.5531251476354702,
      "grad_norm": 0.499338299036026,
      "learning_rate": 9.64984803388923e-06,
      "loss": 0.5479,
      "step": 131500
    },
    {
      "epoch": 1.5590305664477726,
      "grad_norm": 0.45118188858032227,
      "learning_rate": 9.610478575140549e-06,
      "loss": 0.5273,
      "step": 132000
    },
    {
      "epoch": 1.5649359852600746,
      "grad_norm": 7.0303473472595215,
      "learning_rate": 9.57110911639187e-06,
      "loss": 0.5668,
      "step": 132500
    },
    {
      "epoch": 1.5708414040723768,
      "grad_norm": 3.8048393726348877,
      "learning_rate": 9.531739657643188e-06,
      "loss": 0.5587,
      "step": 133000
    },
    {
      "epoch": 1.576746822884679,
      "grad_norm": 4.167747974395752,
      "learning_rate": 9.492370198894505e-06,
      "loss": 0.5506,
      "step": 133500
    },
    {
      "epoch": 1.582652241696981,
      "grad_norm": 0.5552203059196472,
      "learning_rate": 9.453000740145825e-06,
      "loss": 0.5672,
      "step": 134000
    },
    {
      "epoch": 1.5885576605092833,
      "grad_norm": 14.452466011047363,
      "learning_rate": 9.413631281397144e-06,
      "loss": 0.5626,
      "step": 134500
    },
    {
      "epoch": 1.5944630793215855,
      "grad_norm": 3.762688636779785,
      "learning_rate": 9.374261822648462e-06,
      "loss": 0.5351,
      "step": 135000
    },
    {
      "epoch": 1.6003684981338877,
      "grad_norm": 5.434040069580078,
      "learning_rate": 9.33497110281728e-06,
      "loss": 0.5493,
      "step": 135500
    },
    {
      "epoch": 1.60627391694619,
      "grad_norm": 0.42822033166885376,
      "learning_rate": 9.295601644068599e-06,
      "loss": 0.5373,
      "step": 136000
    },
    {
      "epoch": 1.612179335758492,
      "grad_norm": 5.176085472106934,
      "learning_rate": 9.256232185319916e-06,
      "loss": 0.5474,
      "step": 136500
    },
    {
      "epoch": 1.6180847545707941,
      "grad_norm": 0.9219861626625061,
      "learning_rate": 9.216862726571236e-06,
      "loss": 0.5238,
      "step": 137000
    },
    {
      "epoch": 1.6239901733830964,
      "grad_norm": 0.42050594091415405,
      "learning_rate": 9.177572006740051e-06,
      "loss": 0.5089,
      "step": 137500
    },
    {
      "epoch": 1.6298955921953984,
      "grad_norm": 0.4675411880016327,
      "learning_rate": 9.13820254799137e-06,
      "loss": 0.5347,
      "step": 138000
    },
    {
      "epoch": 1.6358010110077008,
      "grad_norm": 0.4429144561290741,
      "learning_rate": 9.098911828160188e-06,
      "loss": 0.53,
      "step": 138500
    },
    {
      "epoch": 1.6417064298200028,
      "grad_norm": 0.5213695168495178,
      "learning_rate": 9.059542369411506e-06,
      "loss": 0.5393,
      "step": 139000
    },
    {
      "epoch": 1.647611848632305,
      "grad_norm": 5.133869171142578,
      "learning_rate": 9.020172910662825e-06,
      "loss": 0.5551,
      "step": 139500
    },
    {
      "epoch": 1.6535172674446073,
      "grad_norm": 0.7379022836685181,
      "learning_rate": 8.980803451914145e-06,
      "loss": 0.5823,
      "step": 140000
    },
    {
      "epoch": 1.6594226862569093,
      "grad_norm": 9.675186157226562,
      "learning_rate": 8.941433993165462e-06,
      "loss": 0.5478,
      "step": 140500
    },
    {
      "epoch": 1.6653281050692115,
      "grad_norm": 8.614265441894531,
      "learning_rate": 8.902064534416782e-06,
      "loss": 0.5602,
      "step": 141000
    },
    {
      "epoch": 1.6712335238815137,
      "grad_norm": 5.278205871582031,
      "learning_rate": 8.862695075668101e-06,
      "loss": 0.5239,
      "step": 141500
    },
    {
      "epoch": 1.6771389426938157,
      "grad_norm": 0.5190747380256653,
      "learning_rate": 8.82332561691942e-06,
      "loss": 0.5538,
      "step": 142000
    },
    {
      "epoch": 1.6830443615061181,
      "grad_norm": 5.418485641479492,
      "learning_rate": 8.784034897088236e-06,
      "loss": 0.5344,
      "step": 142500
    },
    {
      "epoch": 1.6889497803184201,
      "grad_norm": 11.25378704071045,
      "learning_rate": 8.744665438339554e-06,
      "loss": 0.5454,
      "step": 143000
    },
    {
      "epoch": 1.6948551991307224,
      "grad_norm": 0.3652142584323883,
      "learning_rate": 8.705295979590873e-06,
      "loss": 0.5576,
      "step": 143500
    },
    {
      "epoch": 1.7007606179430246,
      "grad_norm": 5.730474472045898,
      "learning_rate": 8.665926520842191e-06,
      "loss": 0.5605,
      "step": 144000
    },
    {
      "epoch": 1.7066660367553266,
      "grad_norm": 3.815854072570801,
      "learning_rate": 8.62655706209351e-06,
      "loss": 0.5292,
      "step": 144500
    },
    {
      "epoch": 1.712571455567629,
      "grad_norm": 4.446095943450928,
      "learning_rate": 8.587266342262328e-06,
      "loss": 0.5489,
      "step": 145000
    },
    {
      "epoch": 1.718476874379931,
      "grad_norm": 14.915255546569824,
      "learning_rate": 8.547896883513647e-06,
      "loss": 0.5334,
      "step": 145500
    },
    {
      "epoch": 1.7243822931922332,
      "grad_norm": 4.688992977142334,
      "learning_rate": 8.508606163682463e-06,
      "loss": 0.5645,
      "step": 146000
    },
    {
      "epoch": 1.7302877120045355,
      "grad_norm": 1.7567044496536255,
      "learning_rate": 8.46923670493378e-06,
      "loss": 0.5774,
      "step": 146500
    },
    {
      "epoch": 1.7361931308168375,
      "grad_norm": 3.2886345386505127,
      "learning_rate": 8.4298672461851e-06,
      "loss": 0.582,
      "step": 147000
    },
    {
      "epoch": 1.7420985496291397,
      "grad_norm": 16.65200424194336,
      "learning_rate": 8.39049778743642e-06,
      "loss": 0.5144,
      "step": 147500
    },
    {
      "epoch": 1.748003968441442,
      "grad_norm": 4.4966301918029785,
      "learning_rate": 8.351128328687737e-06,
      "loss": 0.5518,
      "step": 148000
    },
    {
      "epoch": 1.753909387253744,
      "grad_norm": 6.592241287231445,
      "learning_rate": 8.311758869939057e-06,
      "loss": 0.5226,
      "step": 148500
    },
    {
      "epoch": 1.7598148060660463,
      "grad_norm": 2.397592782974243,
      "learning_rate": 8.272389411190376e-06,
      "loss": 0.5193,
      "step": 149000
    },
    {
      "epoch": 1.7657202248783483,
      "grad_norm": 3.1350808143615723,
      "learning_rate": 8.233019952441694e-06,
      "loss": 0.5367,
      "step": 149500
    },
    {
      "epoch": 1.7716256436906506,
      "grad_norm": 0.9412761330604553,
      "learning_rate": 8.193650493693013e-06,
      "loss": 0.5596,
      "step": 150000
    },
    {
      "epoch": 1.7775310625029528,
      "grad_norm": 1.5991017818450928,
      "learning_rate": 8.154359773861829e-06,
      "loss": 0.5282,
      "step": 150500
    },
    {
      "epoch": 1.7834364813152548,
      "grad_norm": 0.35781508684158325,
      "learning_rate": 8.115069054030646e-06,
      "loss": 0.5356,
      "step": 151000
    },
    {
      "epoch": 1.789341900127557,
      "grad_norm": 0.567905068397522,
      "learning_rate": 8.075699595281965e-06,
      "loss": 0.5814,
      "step": 151500
    },
    {
      "epoch": 1.7952473189398592,
      "grad_norm": 0.3652840256690979,
      "learning_rate": 8.036330136533283e-06,
      "loss": 0.5148,
      "step": 152000
    },
    {
      "epoch": 1.8011527377521612,
      "grad_norm": 8.219497680664062,
      "learning_rate": 7.996960677784603e-06,
      "loss": 0.5564,
      "step": 152500
    },
    {
      "epoch": 1.8070581565644637,
      "grad_norm": 11.082978248596191,
      "learning_rate": 7.957591219035922e-06,
      "loss": 0.5772,
      "step": 153000
    },
    {
      "epoch": 1.8129635753767657,
      "grad_norm": 9.251150131225586,
      "learning_rate": 7.91822176028724e-06,
      "loss": 0.5433,
      "step": 153500
    },
    {
      "epoch": 1.818868994189068,
      "grad_norm": 25.916170120239258,
      "learning_rate": 7.87885230153856e-06,
      "loss": 0.5636,
      "step": 154000
    },
    {
      "epoch": 1.82477441300137,
      "grad_norm": 21.578245162963867,
      "learning_rate": 7.839482842789879e-06,
      "loss": 0.5564,
      "step": 154500
    },
    {
      "epoch": 1.830679831813672,
      "grad_norm": 25.095073699951172,
      "learning_rate": 7.800192122958694e-06,
      "loss": 0.5253,
      "step": 155000
    },
    {
      "epoch": 1.8365852506259746,
      "grad_norm": 6.516243934631348,
      "learning_rate": 7.760822664210012e-06,
      "loss": 0.4997,
      "step": 155500
    },
    {
      "epoch": 1.8424906694382766,
      "grad_norm": 0.37588614225387573,
      "learning_rate": 7.721531944378829e-06,
      "loss": 0.5575,
      "step": 156000
    },
    {
      "epoch": 1.8483960882505788,
      "grad_norm": 0.3492718040943146,
      "learning_rate": 7.682162485630149e-06,
      "loss": 0.5366,
      "step": 156500
    },
    {
      "epoch": 1.854301507062881,
      "grad_norm": 4.217477321624756,
      "learning_rate": 7.642793026881466e-06,
      "loss": 0.5493,
      "step": 157000
    },
    {
      "epoch": 1.860206925875183,
      "grad_norm": 4.931800842285156,
      "learning_rate": 7.603423568132786e-06,
      "loss": 0.5619,
      "step": 157500
    },
    {
      "epoch": 1.8661123446874852,
      "grad_norm": 4.721713066101074,
      "learning_rate": 7.5640541093841045e-06,
      "loss": 0.5369,
      "step": 158000
    },
    {
      "epoch": 1.8720177634997874,
      "grad_norm": 0.4199652671813965,
      "learning_rate": 7.524684650635423e-06,
      "loss": 0.554,
      "step": 158500
    },
    {
      "epoch": 1.8779231823120894,
      "grad_norm": 18.513507843017578,
      "learning_rate": 7.4853151918867426e-06,
      "loss": 0.5413,
      "step": 159000
    },
    {
      "epoch": 1.8838286011243919,
      "grad_norm": 19.6885929107666,
      "learning_rate": 7.445945733138061e-06,
      "loss": 0.5472,
      "step": 159500
    },
    {
      "epoch": 1.8897340199366939,
      "grad_norm": 9.544459342956543,
      "learning_rate": 7.406655013306878e-06,
      "loss": 0.5185,
      "step": 160000
    },
    {
      "epoch": 1.895639438748996,
      "grad_norm": 4.329341411590576,
      "learning_rate": 7.367285554558197e-06,
      "loss": 0.488,
      "step": 160500
    },
    {
      "epoch": 1.9015448575612983,
      "grad_norm": 0.7727178931236267,
      "learning_rate": 7.3279160958095155e-06,
      "loss": 0.5912,
      "step": 161000
    },
    {
      "epoch": 1.9074502763736003,
      "grad_norm": 15.250825881958008,
      "learning_rate": 7.288546637060834e-06,
      "loss": 0.4984,
      "step": 161500
    },
    {
      "epoch": 1.9133556951859025,
      "grad_norm": 0.4790278673171997,
      "learning_rate": 7.249177178312154e-06,
      "loss": 0.5443,
      "step": 162000
    },
    {
      "epoch": 1.9192611139982048,
      "grad_norm": 13.285900115966797,
      "learning_rate": 7.209886458480969e-06,
      "loss": 0.5444,
      "step": 162500
    },
    {
      "epoch": 1.9251665328105068,
      "grad_norm": 7.882303237915039,
      "learning_rate": 7.1705169997322885e-06,
      "loss": 0.5251,
      "step": 163000
    },
    {
      "epoch": 1.9310719516228092,
      "grad_norm": 6.691035270690918,
      "learning_rate": 7.131147540983607e-06,
      "loss": 0.5672,
      "step": 163500
    },
    {
      "epoch": 1.9369773704351112,
      "grad_norm": 14.164164543151855,
      "learning_rate": 7.091778082234926e-06,
      "loss": 0.4994,
      "step": 164000
    },
    {
      "epoch": 1.9428827892474134,
      "grad_norm": 0.4879821538925171,
      "learning_rate": 7.0525661013212396e-06,
      "loss": 0.5328,
      "step": 164500
    },
    {
      "epoch": 1.9487882080597156,
      "grad_norm": 7.822917461395264,
      "learning_rate": 7.013196642572558e-06,
      "loss": 0.5718,
      "step": 165000
    },
    {
      "epoch": 1.9546936268720176,
      "grad_norm": 0.2471114546060562,
      "learning_rate": 6.973827183823877e-06,
      "loss": 0.5178,
      "step": 165500
    },
    {
      "epoch": 1.96059904568432,
      "grad_norm": 1.3693519830703735,
      "learning_rate": 6.934457725075196e-06,
      "loss": 0.5818,
      "step": 166000
    },
    {
      "epoch": 1.966504464496622,
      "grad_norm": 0.4519917070865631,
      "learning_rate": 6.895088266326515e-06,
      "loss": 0.5191,
      "step": 166500
    },
    {
      "epoch": 1.9724098833089243,
      "grad_norm": 4.211235046386719,
| "learning_rate": 6.855718807577834e-06, | |
| "loss": 0.536, | |
| "step": 167000 | |
| }, | |
| { | |
| "epoch": 1.9783153021212265, | |
| "grad_norm": 13.497679710388184, | |
| "learning_rate": 6.816349348829153e-06, | |
| "loss": 0.5625, | |
| "step": 167500 | |
| }, | |
| { | |
| "epoch": 1.9842207209335285, | |
| "grad_norm": 3.9584550857543945, | |
| "learning_rate": 6.776979890080472e-06, | |
| "loss": 0.5704, | |
| "step": 168000 | |
| }, | |
| { | |
| "epoch": 1.9901261397458307, | |
| "grad_norm": 15.123173713684082, | |
| "learning_rate": 6.73761043133179e-06, | |
| "loss": 0.5284, | |
| "step": 168500 | |
| }, | |
| { | |
| "epoch": 1.996031558558133, | |
| "grad_norm": 8.3848876953125, | |
| "learning_rate": 6.698319711500607e-06, | |
| "loss": 0.5433, | |
| "step": 169000 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_accuracy": 0.497122205862669, | |
| "eval_f1": 0.5241418199760344, | |
| "eval_loss": 0.5884916186332703, | |
| "eval_roc_auc": 0.6818782020829143, | |
| "eval_runtime": 112.692, | |
| "eval_samples_per_second": 66.296, | |
| "eval_steps_per_second": 66.296, | |
| "step": 169336 | |
| }, | |
| { | |
| "epoch": 2.001936977370435, | |
| "grad_norm": 0.37359023094177246, | |
| "learning_rate": 6.659028991669423e-06, | |
| "loss": 0.5203, | |
| "step": 169500 | |
| }, | |
| { | |
| "epoch": 2.0078423961827374, | |
| "grad_norm": 0.13417910039424896, | |
| "learning_rate": 6.619659532920742e-06, | |
| "loss": 0.4761, | |
| "step": 170000 | |
| }, | |
| { | |
| "epoch": 2.0137478149950394, | |
| "grad_norm": 0.16143397986888885, | |
| "learning_rate": 6.580290074172061e-06, | |
| "loss": 0.5239, | |
| "step": 170500 | |
| }, | |
| { | |
| "epoch": 2.0196532338073414, | |
| "grad_norm": 5.961240291595459, | |
| "learning_rate": 6.5409206154233795e-06, | |
| "loss": 0.5321, | |
| "step": 171000 | |
| }, | |
| { | |
| "epoch": 2.025558652619644, | |
| "grad_norm": 10.15018367767334, | |
| "learning_rate": 6.501551156674698e-06, | |
| "loss": 0.4946, | |
| "step": 171500 | |
| }, | |
| { | |
| "epoch": 2.031464071431946, | |
| "grad_norm": 13.941854476928711, | |
| "learning_rate": 6.462260436843515e-06, | |
| "loss": 0.5375, | |
| "step": 172000 | |
| }, | |
| { | |
| "epoch": 2.0373694902442483, | |
| "grad_norm": 90.04341888427734, | |
| "learning_rate": 6.422890978094833e-06, | |
| "loss": 0.558, | |
| "step": 172500 | |
| }, | |
| { | |
| "epoch": 2.0432749090565503, | |
| "grad_norm": 0.3982486128807068, | |
| "learning_rate": 6.383521519346153e-06, | |
| "loss": 0.4909, | |
| "step": 173000 | |
| }, | |
| { | |
| "epoch": 2.0491803278688523, | |
| "grad_norm": 14.257329940795898, | |
| "learning_rate": 6.344152060597472e-06, | |
| "loss": 0.5097, | |
| "step": 173500 | |
| }, | |
| { | |
| "epoch": 2.0550857466811547, | |
| "grad_norm": 0.2994830310344696, | |
| "learning_rate": 6.304861340766287e-06, | |
| "loss": 0.523, | |
| "step": 174000 | |
| }, | |
| { | |
| "epoch": 2.0609911654934567, | |
| "grad_norm": 21.691524505615234, | |
| "learning_rate": 6.265491882017607e-06, | |
| "loss": 0.5254, | |
| "step": 174500 | |
| }, | |
| { | |
| "epoch": 2.066896584305759, | |
| "grad_norm": 7.8108134269714355, | |
| "learning_rate": 6.2261224232689255e-06, | |
| "loss": 0.548, | |
| "step": 175000 | |
| }, | |
| { | |
| "epoch": 2.072802003118061, | |
| "grad_norm": 6.7383928298950195, | |
| "learning_rate": 6.186752964520244e-06, | |
| "loss": 0.5098, | |
| "step": 175500 | |
| }, | |
| { | |
| "epoch": 2.078707421930363, | |
| "grad_norm": 5.449107646942139, | |
| "learning_rate": 6.1473835057715636e-06, | |
| "loss": 0.5506, | |
| "step": 176000 | |
| }, | |
| { | |
| "epoch": 2.0846128407426656, | |
| "grad_norm": 35.49790573120117, | |
| "learning_rate": 6.108014047022882e-06, | |
| "loss": 0.5142, | |
| "step": 176500 | |
| }, | |
| { | |
| "epoch": 2.0905182595549676, | |
| "grad_norm": 7.552992820739746, | |
| "learning_rate": 6.068644588274201e-06, | |
| "loss": 0.5508, | |
| "step": 177000 | |
| }, | |
| { | |
| "epoch": 2.0964236783672696, | |
| "grad_norm": 128.05538940429688, | |
| "learning_rate": 6.029353868443018e-06, | |
| "loss": 0.4966, | |
| "step": 177500 | |
| }, | |
| { | |
| "epoch": 2.102329097179572, | |
| "grad_norm": 8.854266166687012, | |
| "learning_rate": 5.989984409694336e-06, | |
| "loss": 0.4974, | |
| "step": 178000 | |
| }, | |
| { | |
| "epoch": 2.108234515991874, | |
| "grad_norm": 0.3475770950317383, | |
| "learning_rate": 5.950614950945654e-06, | |
| "loss": 0.5212, | |
| "step": 178500 | |
| }, | |
| { | |
| "epoch": 2.1141399348041765, | |
| "grad_norm": 2.3008108139038086, | |
| "learning_rate": 5.911245492196974e-06, | |
| "loss": 0.5311, | |
| "step": 179000 | |
| }, | |
| { | |
| "epoch": 2.1200453536164785, | |
| "grad_norm": 8.415946960449219, | |
| "learning_rate": 5.87195477236579e-06, | |
| "loss": 0.5654, | |
| "step": 179500 | |
| }, | |
| { | |
| "epoch": 2.1259507724287805, | |
| "grad_norm": 6.0155863761901855, | |
| "learning_rate": 5.832585313617109e-06, | |
| "loss": 0.5129, | |
| "step": 180000 | |
| }, | |
| { | |
| "epoch": 2.131856191241083, | |
| "grad_norm": 0.5401498079299927, | |
| "learning_rate": 5.793215854868428e-06, | |
| "loss": 0.5199, | |
| "step": 180500 | |
| }, | |
| { | |
| "epoch": 2.137761610053385, | |
| "grad_norm": 0.5212739109992981, | |
| "learning_rate": 5.753846396119747e-06, | |
| "loss": 0.5369, | |
| "step": 181000 | |
| }, | |
| { | |
| "epoch": 2.1436670288656874, | |
| "grad_norm": 15.587518692016602, | |
| "learning_rate": 5.714476937371065e-06, | |
| "loss": 0.5185, | |
| "step": 181500 | |
| }, | |
| { | |
| "epoch": 2.1495724476779894, | |
| "grad_norm": 0.30678337812423706, | |
| "learning_rate": 5.675186217539882e-06, | |
| "loss": 0.5435, | |
| "step": 182000 | |
| }, | |
| { | |
| "epoch": 2.1554778664902914, | |
| "grad_norm": 0.4348577558994293, | |
| "learning_rate": 5.6358167587912e-06, | |
| "loss": 0.5337, | |
| "step": 182500 | |
| }, | |
| { | |
| "epoch": 2.161383285302594, | |
| "grad_norm": 5.63192892074585, | |
| "learning_rate": 5.596526038960017e-06, | |
| "loss": 0.4621, | |
| "step": 183000 | |
| }, | |
| { | |
| "epoch": 2.167288704114896, | |
| "grad_norm": 0.7454834580421448, | |
| "learning_rate": 5.557156580211336e-06, | |
| "loss": 0.5674, | |
| "step": 183500 | |
| }, | |
| { | |
| "epoch": 2.173194122927198, | |
| "grad_norm": 0.37728533148765564, | |
| "learning_rate": 5.517787121462655e-06, | |
| "loss": 0.5372, | |
| "step": 184000 | |
| }, | |
| { | |
| "epoch": 2.1790995417395003, | |
| "grad_norm": 5.926390171051025, | |
| "learning_rate": 5.478417662713973e-06, | |
| "loss": 0.5288, | |
| "step": 184500 | |
| }, | |
| { | |
| "epoch": 2.1850049605518023, | |
| "grad_norm": 13.641364097595215, | |
| "learning_rate": 5.4391269428827894e-06, | |
| "loss": 0.5299, | |
| "step": 185000 | |
| }, | |
| { | |
| "epoch": 2.1909103793641047, | |
| "grad_norm": 24.8594970703125, | |
| "learning_rate": 5.399757484134108e-06, | |
| "loss": 0.5262, | |
| "step": 185500 | |
| }, | |
| { | |
| "epoch": 2.1968157981764067, | |
| "grad_norm": 8.509349822998047, | |
| "learning_rate": 5.3603880253854276e-06, | |
| "loss": 0.5463, | |
| "step": 186000 | |
| }, | |
| { | |
| "epoch": 2.2027212169887087, | |
| "grad_norm": 7.8245415687561035, | |
| "learning_rate": 5.321018566636746e-06, | |
| "loss": 0.5001, | |
| "step": 186500 | |
| }, | |
| { | |
| "epoch": 2.208626635801011, | |
| "grad_norm": 0.7356006503105164, | |
| "learning_rate": 5.281649107888065e-06, | |
| "loss": 0.4916, | |
| "step": 187000 | |
| }, | |
| { | |
| "epoch": 2.214532054613313, | |
| "grad_norm": 3.2764933109283447, | |
| "learning_rate": 5.242279649139384e-06, | |
| "loss": 0.5417, | |
| "step": 187500 | |
| }, | |
| { | |
| "epoch": 2.220437473425615, | |
| "grad_norm": 9.338927268981934, | |
| "learning_rate": 5.202910190390703e-06, | |
| "loss": 0.5484, | |
| "step": 188000 | |
| }, | |
| { | |
| "epoch": 2.2263428922379176, | |
| "grad_norm": 104.39839172363281, | |
| "learning_rate": 5.163540731642022e-06, | |
| "loss": 0.496, | |
| "step": 188500 | |
| }, | |
| { | |
| "epoch": 2.2322483110502196, | |
| "grad_norm": 0.22737614810466766, | |
| "learning_rate": 5.12417127289334e-06, | |
| "loss": 0.5135, | |
| "step": 189000 | |
| }, | |
| { | |
| "epoch": 2.238153729862522, | |
| "grad_norm": 7.401103973388672, | |
| "learning_rate": 5.084880553062157e-06, | |
| "loss": 0.4895, | |
| "step": 189500 | |
| }, | |
| { | |
| "epoch": 2.244059148674824, | |
| "grad_norm": 179.85498046875, | |
| "learning_rate": 5.045511094313476e-06, | |
| "loss": 0.4943, | |
| "step": 190000 | |
| }, | |
| { | |
| "epoch": 2.249964567487126, | |
| "grad_norm": 0.13216468691825867, | |
| "learning_rate": 5.006141635564795e-06, | |
| "loss": 0.5314, | |
| "step": 190500 | |
| }, | |
| { | |
| "epoch": 2.2558699862994285, | |
| "grad_norm": 58.944786071777344, | |
| "learning_rate": 4.966772176816114e-06, | |
| "loss": 0.5789, | |
| "step": 191000 | |
| }, | |
| { | |
| "epoch": 2.2617754051117305, | |
| "grad_norm": 6.750619411468506, | |
| "learning_rate": 4.92748145698493e-06, | |
| "loss": 0.512, | |
| "step": 191500 | |
| }, | |
| { | |
| "epoch": 2.2676808239240325, | |
| "grad_norm": 0.23294350504875183, | |
| "learning_rate": 4.888111998236249e-06, | |
| "loss": 0.5074, | |
| "step": 192000 | |
| }, | |
| { | |
| "epoch": 2.273586242736335, | |
| "grad_norm": 4.00797176361084, | |
| "learning_rate": 4.8487425394875675e-06, | |
| "loss": 0.4682, | |
| "step": 192500 | |
| }, | |
| { | |
| "epoch": 2.279491661548637, | |
| "grad_norm": 11.367417335510254, | |
| "learning_rate": 4.809373080738887e-06, | |
| "loss": 0.5287, | |
| "step": 193000 | |
| }, | |
| { | |
| "epoch": 2.2853970803609394, | |
| "grad_norm": 0.20395159721374512, | |
| "learning_rate": 4.770082360907702e-06, | |
| "loss": 0.5177, | |
| "step": 193500 | |
| }, | |
| { | |
| "epoch": 2.2913024991732414, | |
| "grad_norm": 0.48907470703125, | |
| "learning_rate": 4.730712902159022e-06, | |
| "loss": 0.5235, | |
| "step": 194000 | |
| }, | |
| { | |
| "epoch": 2.2972079179855434, | |
| "grad_norm": 1.3965227603912354, | |
| "learning_rate": 4.6913434434103405e-06, | |
| "loss": 0.5368, | |
| "step": 194500 | |
| }, | |
| { | |
| "epoch": 2.303113336797846, | |
| "grad_norm": 0.20722346007823944, | |
| "learning_rate": 4.651973984661659e-06, | |
| "loss": 0.5241, | |
| "step": 195000 | |
| }, | |
| { | |
| "epoch": 2.309018755610148, | |
| "grad_norm": 0.56623375415802, | |
| "learning_rate": 4.612604525912978e-06, | |
| "loss": 0.5127, | |
| "step": 195500 | |
| }, | |
| { | |
| "epoch": 2.3149241744224502, | |
| "grad_norm": 14.25666618347168, | |
| "learning_rate": 4.573313806081795e-06, | |
| "loss": 0.5303, | |
| "step": 196000 | |
| }, | |
| { | |
| "epoch": 2.3208295932347522, | |
| "grad_norm": 0.31170445680618286, | |
| "learning_rate": 4.5339443473331134e-06, | |
| "loss": 0.5233, | |
| "step": 196500 | |
| }, | |
| { | |
| "epoch": 2.3267350120470542, | |
| "grad_norm": 0.19042448699474335, | |
| "learning_rate": 4.494574888584432e-06, | |
| "loss": 0.5049, | |
| "step": 197000 | |
| }, | |
| { | |
| "epoch": 2.3326404308593567, | |
| "grad_norm": 16.922496795654297, | |
| "learning_rate": 4.455284168753248e-06, | |
| "loss": 0.5722, | |
| "step": 197500 | |
| }, | |
| { | |
| "epoch": 2.3385458496716587, | |
| "grad_norm": 9.51486587524414, | |
| "learning_rate": 4.415914710004568e-06, | |
| "loss": 0.502, | |
| "step": 198000 | |
| }, | |
| { | |
| "epoch": 2.3444512684839607, | |
| "grad_norm": 12.042534828186035, | |
| "learning_rate": 4.3765452512558856e-06, | |
| "loss": 0.5382, | |
| "step": 198500 | |
| }, | |
| { | |
| "epoch": 2.350356687296263, | |
| "grad_norm": 0.15318746864795685, | |
| "learning_rate": 4.337175792507205e-06, | |
| "loss": 0.4805, | |
| "step": 199000 | |
| }, | |
| { | |
| "epoch": 2.356262106108565, | |
| "grad_norm": 6.209574222564697, | |
| "learning_rate": 4.297885072676021e-06, | |
| "loss": 0.5378, | |
| "step": 199500 | |
| }, | |
| { | |
| "epoch": 2.3621675249208676, | |
| "grad_norm": 7.745547294616699, | |
| "learning_rate": 4.25851561392734e-06, | |
| "loss": 0.528, | |
| "step": 200000 | |
| }, | |
| { | |
| "epoch": 2.3680729437331696, | |
| "grad_norm": 51.523094177246094, | |
| "learning_rate": 4.2191461551786585e-06, | |
| "loss": 0.5285, | |
| "step": 200500 | |
| }, | |
| { | |
| "epoch": 2.3739783625454716, | |
| "grad_norm": 6.393804550170898, | |
| "learning_rate": 4.179776696429978e-06, | |
| "loss": 0.5711, | |
| "step": 201000 | |
| }, | |
| { | |
| "epoch": 2.379883781357774, | |
| "grad_norm": 0.2673965394496918, | |
| "learning_rate": 4.140407237681297e-06, | |
| "loss": 0.534, | |
| "step": 201500 | |
| }, | |
| { | |
| "epoch": 2.385789200170076, | |
| "grad_norm": 7.325765609741211, | |
| "learning_rate": 4.101037778932615e-06, | |
| "loss": 0.5324, | |
| "step": 202000 | |
| }, | |
| { | |
| "epoch": 2.3916946189823785, | |
| "grad_norm": 0.23673997819423676, | |
| "learning_rate": 4.061668320183935e-06, | |
| "loss": 0.5753, | |
| "step": 202500 | |
| }, | |
| { | |
| "epoch": 2.3976000377946804, | |
| "grad_norm": 47.96986389160156, | |
| "learning_rate": 4.022298861435253e-06, | |
| "loss": 0.4933, | |
| "step": 203000 | |
| }, | |
| { | |
| "epoch": 2.4035054566069824, | |
| "grad_norm": 7.979691505432129, | |
| "learning_rate": 3.982929402686572e-06, | |
| "loss": 0.5321, | |
| "step": 203500 | |
| }, | |
| { | |
| "epoch": 2.409410875419285, | |
| "grad_norm": 0.8339338302612305, | |
| "learning_rate": 3.943638682855388e-06, | |
| "loss": 0.5473, | |
| "step": 204000 | |
| }, | |
| { | |
| "epoch": 2.415316294231587, | |
| "grad_norm": 47.454368591308594, | |
| "learning_rate": 3.904269224106707e-06, | |
| "loss": 0.559, | |
| "step": 204500 | |
| }, | |
| { | |
| "epoch": 2.421221713043889, | |
| "grad_norm": 0.33361056447029114, | |
| "learning_rate": 3.864978504275523e-06, | |
| "loss": 0.5517, | |
| "step": 205000 | |
| }, | |
| { | |
| "epoch": 2.4271271318561913, | |
| "grad_norm": 17.990787506103516, | |
| "learning_rate": 3.8256090455268426e-06, | |
| "loss": 0.511, | |
| "step": 205500 | |
| }, | |
| { | |
| "epoch": 2.4330325506684933, | |
| "grad_norm": 9.788211822509766, | |
| "learning_rate": 3.7862395867781616e-06, | |
| "loss": 0.5394, | |
| "step": 206000 | |
| }, | |
| { | |
| "epoch": 2.4389379694807953, | |
| "grad_norm": 0.29322516918182373, | |
| "learning_rate": 3.74687012802948e-06, | |
| "loss": 0.5919, | |
| "step": 206500 | |
| }, | |
| { | |
| "epoch": 2.4448433882930978, | |
| "grad_norm": 81.08358764648438, | |
| "learning_rate": 3.707500669280799e-06, | |
| "loss": 0.5396, | |
| "step": 207000 | |
| }, | |
| { | |
| "epoch": 2.4507488071053998, | |
| "grad_norm": 0.4258115887641907, | |
| "learning_rate": 3.6682099494496155e-06, | |
| "loss": 0.472, | |
| "step": 207500 | |
| }, | |
| { | |
| "epoch": 2.456654225917702, | |
| "grad_norm": 6.371994972229004, | |
| "learning_rate": 3.6288404907009338e-06, | |
| "loss": 0.5882, | |
| "step": 208000 | |
| }, | |
| { | |
| "epoch": 2.462559644730004, | |
| "grad_norm": 11.043325424194336, | |
| "learning_rate": 3.589471031952253e-06, | |
| "loss": 0.5457, | |
| "step": 208500 | |
| }, | |
| { | |
| "epoch": 2.4684650635423067, | |
| "grad_norm": 9.996238708496094, | |
| "learning_rate": 3.550101573203572e-06, | |
| "loss": 0.5346, | |
| "step": 209000 | |
| }, | |
| { | |
| "epoch": 2.4743704823546087, | |
| "grad_norm": 0.32652753591537476, | |
| "learning_rate": 3.5107321144548905e-06, | |
| "loss": 0.5461, | |
| "step": 209500 | |
| }, | |
| { | |
| "epoch": 2.4802759011669107, | |
| "grad_norm": 0.2573184370994568, | |
| "learning_rate": 3.4713626557062096e-06, | |
| "loss": 0.5025, | |
| "step": 210000 | |
| }, | |
| { | |
| "epoch": 2.486181319979213, | |
| "grad_norm": 9.490194320678711, | |
| "learning_rate": 3.4319931969575286e-06, | |
| "loss": 0.5443, | |
| "step": 210500 | |
| }, | |
| { | |
| "epoch": 2.492086738791515, | |
| "grad_norm": 1.0135701894760132, | |
| "learning_rate": 3.3926237382088473e-06, | |
| "loss": 0.5176, | |
| "step": 211000 | |
| }, | |
| { | |
| "epoch": 2.497992157603817, | |
| "grad_norm": 0.3167944848537445, | |
| "learning_rate": 3.3533330183776635e-06, | |
| "loss": 0.542, | |
| "step": 211500 | |
| }, | |
| { | |
| "epoch": 2.5038975764161195, | |
| "grad_norm": 0.47053876519203186, | |
| "learning_rate": 3.3140422985464797e-06, | |
| "loss": 0.4944, | |
| "step": 212000 | |
| }, | |
| { | |
| "epoch": 2.5098029952284215, | |
| "grad_norm": 0.2583182752132416, | |
| "learning_rate": 3.2746728397977987e-06, | |
| "loss": 0.5167, | |
| "step": 212500 | |
| }, | |
| { | |
| "epoch": 2.5157084140407235, | |
| "grad_norm": 11.087862968444824, | |
| "learning_rate": 3.2353033810491174e-06, | |
| "loss": 0.5117, | |
| "step": 213000 | |
| }, | |
| { | |
| "epoch": 2.521613832853026, | |
| "grad_norm": 0.197793111205101, | |
| "learning_rate": 3.1959339223004364e-06, | |
| "loss": 0.5265, | |
| "step": 213500 | |
| }, | |
| { | |
| "epoch": 2.527519251665328, | |
| "grad_norm": 0.09116879105567932, | |
| "learning_rate": 3.1565644635517555e-06, | |
| "loss": 0.4583, | |
| "step": 214000 | |
| }, | |
| { | |
| "epoch": 2.5334246704776304, | |
| "grad_norm": 0.3125598728656769, | |
| "learning_rate": 3.1172737437205713e-06, | |
| "loss": 0.5588, | |
| "step": 214500 | |
| }, | |
| { | |
| "epoch": 2.5393300892899324, | |
| "grad_norm": 0.20921526849269867, | |
| "learning_rate": 3.0779042849718903e-06, | |
| "loss": 0.5181, | |
| "step": 215000 | |
| }, | |
| { | |
| "epoch": 2.545235508102235, | |
| "grad_norm": 0.8386293649673462, | |
| "learning_rate": 3.0385348262232094e-06, | |
| "loss": 0.5606, | |
| "step": 215500 | |
| }, | |
| { | |
| "epoch": 2.551140926914537, | |
| "grad_norm": 1.4247181415557861, | |
| "learning_rate": 2.9992441063920256e-06, | |
| "loss": 0.5632, | |
| "step": 216000 | |
| }, | |
| { | |
| "epoch": 2.557046345726839, | |
| "grad_norm": 56.29065704345703, | |
| "learning_rate": 2.9598746476433443e-06, | |
| "loss": 0.536, | |
| "step": 216500 | |
| }, | |
| { | |
| "epoch": 2.5629517645391413, | |
| "grad_norm": 0.20264124870300293, | |
| "learning_rate": 2.9205051888946633e-06, | |
| "loss": 0.5904, | |
| "step": 217000 | |
| }, | |
| { | |
| "epoch": 2.5688571833514433, | |
| "grad_norm": 5.4820733070373535, | |
| "learning_rate": 2.881135730145982e-06, | |
| "loss": 0.5075, | |
| "step": 217500 | |
| }, | |
| { | |
| "epoch": 2.5747626021637453, | |
| "grad_norm": 0.5356336832046509, | |
| "learning_rate": 2.841766271397301e-06, | |
| "loss": 0.5345, | |
| "step": 218000 | |
| }, | |
| { | |
| "epoch": 2.5806680209760477, | |
| "grad_norm": 11.729897499084473, | |
| "learning_rate": 2.80239681264862e-06, | |
| "loss": 0.5263, | |
| "step": 218500 | |
| }, | |
| { | |
| "epoch": 2.5865734397883497, | |
| "grad_norm": 0.6651723384857178, | |
| "learning_rate": 2.7630273538999387e-06, | |
| "loss": 0.5669, | |
| "step": 219000 | |
| }, | |
| { | |
| "epoch": 2.5924788586006517, | |
| "grad_norm": 0.3053622245788574, | |
| "learning_rate": 2.7236578951512578e-06, | |
| "loss": 0.5336, | |
| "step": 219500 | |
| }, | |
| { | |
| "epoch": 2.598384277412954, | |
| "grad_norm": 0.8806591033935547, | |
| "learning_rate": 2.684288436402577e-06, | |
| "loss": 0.5252, | |
| "step": 220000 | |
| }, | |
| { | |
| "epoch": 2.604289696225256, | |
| "grad_norm": 0.2266666144132614, | |
| "learning_rate": 2.6449189776538955e-06, | |
| "loss": 0.5234, | |
| "step": 220500 | |
| }, | |
| { | |
| "epoch": 2.610195115037558, | |
| "grad_norm": 16.65863800048828, | |
| "learning_rate": 2.6055495189052145e-06, | |
| "loss": 0.5589, | |
| "step": 221000 | |
| }, | |
| { | |
| "epoch": 2.6161005338498606, | |
| "grad_norm": 3.0743777751922607, | |
| "learning_rate": 2.5662587990740307e-06, | |
| "loss": 0.5399, | |
| "step": 221500 | |
| }, | |
| { | |
| "epoch": 2.622005952662163, | |
| "grad_norm": 3.1333186626434326, | |
| "learning_rate": 2.5268893403253494e-06, | |
| "loss": 0.4949, | |
| "step": 222000 | |
| }, | |
| { | |
| "epoch": 2.627911371474465, | |
| "grad_norm": 20.595041275024414, | |
| "learning_rate": 2.4875198815766684e-06, | |
| "loss": 0.5801, | |
| "step": 222500 | |
| }, | |
| { | |
| "epoch": 2.633816790286767, | |
| "grad_norm": 0.6876373887062073, | |
| "learning_rate": 2.448150422827987e-06, | |
| "loss": 0.4822, | |
| "step": 223000 | |
| }, | |
| { | |
| "epoch": 2.6397222090990695, | |
| "grad_norm": 0.30532965064048767, | |
| "learning_rate": 2.4088597029968033e-06, | |
| "loss": 0.496, | |
| "step": 223500 | |
| }, | |
| { | |
| "epoch": 2.6456276279113715, | |
| "grad_norm": 0.18945026397705078, | |
| "learning_rate": 2.3694902442481223e-06, | |
| "loss": 0.5391, | |
| "step": 224000 | |
| }, | |
| { | |
| "epoch": 2.6515330467236735, | |
| "grad_norm": 0.17213118076324463, | |
| "learning_rate": 2.3301207854994414e-06, | |
| "loss": 0.5181, | |
| "step": 224500 | |
| }, | |
| { | |
| "epoch": 2.657438465535976, | |
| "grad_norm": 0.49905282258987427, | |
| "learning_rate": 2.2908300656682576e-06, | |
| "loss": 0.5248, | |
| "step": 225000 | |
| }, | |
| { | |
| "epoch": 2.663343884348278, | |
| "grad_norm": 5.821164608001709, | |
| "learning_rate": 2.2514606069195762e-06, | |
| "loss": 0.5082, | |
| "step": 225500 | |
| }, | |
| { | |
| "epoch": 2.66924930316058, | |
| "grad_norm": 0.1486668437719345, | |
| "learning_rate": 2.2120911481708953e-06, | |
| "loss": 0.5384, | |
| "step": 226000 | |
| }, | |
| { | |
| "epoch": 2.6751547219728824, | |
| "grad_norm": 51.314842224121094, | |
| "learning_rate": 2.172721689422214e-06, | |
| "loss": 0.5064, | |
| "step": 226500 | |
| }, | |
| { | |
| "epoch": 2.6810601407851844, | |
| "grad_norm": 27.27803611755371, | |
| "learning_rate": 2.133352230673533e-06, | |
| "loss": 0.5766, | |
| "step": 227000 | |
| }, | |
| { | |
| "epoch": 2.6869655595974864, | |
| "grad_norm": 8.757097244262695, | |
| "learning_rate": 2.0939827719248516e-06, | |
| "loss": 0.5541, | |
| "step": 227500 | |
| }, | |
| { | |
| "epoch": 2.692870978409789, | |
| "grad_norm": 7.907627582550049, | |
| "learning_rate": 2.0546133131761707e-06, | |
| "loss": 0.5248, | |
| "step": 228000 | |
| }, | |
| { | |
| "epoch": 2.698776397222091, | |
| "grad_norm": 8.180453300476074, | |
| "learning_rate": 2.0152438544274893e-06, | |
| "loss": 0.4975, | |
| "step": 228500 | |
| }, | |
| { | |
| "epoch": 2.7046818160343933, | |
| "grad_norm": 0.3046265244483948, | |
| "learning_rate": 1.976031873513803e-06, | |
| "loss": 0.5666, | |
| "step": 229000 | |
| }, | |
| { | |
| "epoch": 2.7105872348466953, | |
| "grad_norm": 0.474902480840683, | |
| "learning_rate": 1.936662414765122e-06, | |
| "loss": 0.5491, | |
| "step": 229500 | |
| }, | |
| { | |
| "epoch": 2.7164926536589977, | |
| "grad_norm": 9.716066360473633, | |
| "learning_rate": 1.8972929560164408e-06, | |
| "loss": 0.5345, | |
| "step": 230000 | |
| }, | |
| { | |
| "epoch": 2.7223980724712997, | |
| "grad_norm": 0.4300796687602997, | |
| "learning_rate": 1.8579234972677599e-06, | |
| "loss": 0.565, | |
| "step": 230500 | |
| }, | |
| { | |
| "epoch": 2.7283034912836017, | |
| "grad_norm": 0.3592308759689331, | |
| "learning_rate": 1.8185540385190787e-06, | |
| "loss": 0.514, | |
| "step": 231000 | |
| }, | |
| { | |
| "epoch": 2.734208910095904, | |
| "grad_norm": 0.587499737739563, | |
| "learning_rate": 1.7791845797703973e-06, | |
| "loss": 0.5698, | |
| "step": 231500 | |
| }, | |
| { | |
| "epoch": 2.740114328908206, | |
| "grad_norm": 38.904014587402344, | |
| "learning_rate": 1.7398151210217162e-06, | |
| "loss": 0.5504, | |
| "step": 232000 | |
| }, | |
| { | |
| "epoch": 2.746019747720508, | |
| "grad_norm": 0.6263485550880432, | |
| "learning_rate": 1.7004456622730352e-06, | |
| "loss": 0.5276, | |
| "step": 232500 | |
| }, | |
| { | |
| "epoch": 2.7519251665328106, | |
| "grad_norm": 0.22008846700191498, | |
| "learning_rate": 1.661076203524354e-06, | |
| "loss": 0.5478, | |
| "step": 233000 | |
| }, | |
| { | |
| "epoch": 2.7578305853451126, | |
| "grad_norm": 0.35815027356147766, | |
| "learning_rate": 1.621706744775673e-06, | |
| "loss": 0.5106, | |
| "step": 233500 | |
| }, | |
| { | |
| "epoch": 2.7637360041574146, | |
| "grad_norm": 0.17706379294395447, | |
| "learning_rate": 1.5824160249444892e-06, | |
| "loss": 0.5848, | |
| "step": 234000 | |
| }, | |
| { | |
| "epoch": 2.769641422969717, | |
| "grad_norm": 0.4346071481704712, | |
| "learning_rate": 1.543046566195808e-06, | |
| "loss": 0.5091, | |
| "step": 234500 | |
| }, | |
| { | |
| "epoch": 2.775546841782019, | |
| "grad_norm": 2.6666038036346436, | |
| "learning_rate": 1.5036771074471268e-06, | |
| "loss": 0.5276, | |
| "step": 235000 | |
| }, | |
| { | |
| "epoch": 2.7814522605943215, | |
| "grad_norm": 0.46572089195251465, | |
| "learning_rate": 1.464307648698446e-06, | |
| "loss": 0.5342, | |
| "step": 235500 | |
| }, | |
| { | |
| "epoch": 2.7873576794066235, | |
| "grad_norm": 0.4890391528606415, | |
| "learning_rate": 1.4249381899497648e-06, | |
| "loss": 0.5455, | |
| "step": 236000 | |
| }, | |
| { | |
| "epoch": 2.793263098218926, | |
| "grad_norm": 0.31877875328063965, | |
| "learning_rate": 1.3855687312010834e-06, | |
| "loss": 0.5748, | |
| "step": 236500 | |
| }, | |
| { | |
| "epoch": 2.799168517031228, | |
| "grad_norm": 6.273202896118164, | |
| "learning_rate": 1.3462780113698998e-06, | |
| "loss": 0.5051, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 2.80507393584353, | |
| "grad_norm": 0.2578466534614563, | |
| "learning_rate": 1.3069085526212187e-06, | |
| "loss": 0.5167, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 2.8109793546558324, | |
| "grad_norm": 0.690581738948822, | |
| "learning_rate": 1.2675390938725377e-06, | |
| "loss": 0.54, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 2.8168847734681344, | |
| "grad_norm": 101.09083557128906, | |
| "learning_rate": 1.2281696351238564e-06, | |
| "loss": 0.5451, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 2.8227901922804364, | |
| "grad_norm": 66.44615936279297, | |
| "learning_rate": 1.1888001763751752e-06, | |
| "loss": 0.5273, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 2.828695611092739, | |
| "grad_norm": 10.100146293640137, | |
| "learning_rate": 1.1495094565439916e-06, | |
| "loss": 0.5677, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 2.834601029905041, | |
| "grad_norm": 10.71104621887207, | |
| "learning_rate": 1.1101399977953103e-06, | |
| "loss": 0.53, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 2.840506448717343, | |
| "grad_norm": 0.15605445206165314, | |
| "learning_rate": 1.0707705390466293e-06, | |
| "loss": 0.5373, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 2.8464118675296453, | |
| "grad_norm": 0.21397539973258972, | |
| "learning_rate": 1.0314010802979482e-06, | |
| "loss": 0.5476, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 2.8523172863419473, | |
| "grad_norm": 10.296300888061523, | |
| "learning_rate": 9.92031621549267e-07, | |
| "loss": 0.4866, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 2.8582227051542497, | |
| "grad_norm": 52.738101959228516, | |
| "learning_rate": 9.52662162800586e-07, | |
| "loss": 0.5803, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 2.8641281239665517, | |
| "grad_norm": 0.5352908968925476, | |
| "learning_rate": 9.133714429694021e-07, | |
| "loss": 0.5345, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 2.870033542778854, | |
| "grad_norm": 9.421175956726074, | |
| "learning_rate": 8.74001984220721e-07, | |
| "loss": 0.4852, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 2.875938961591156, | |
| "grad_norm": 5.814650535583496, | |
| "learning_rate": 8.346325254720399e-07, | |
| "loss": 0.5233, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 2.881844380403458, | |
| "grad_norm": 0.7075567245483398, | |
| "learning_rate": 7.952630667233588e-07, | |
| "loss": 0.5545, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 2.8877497992157606, | |
| "grad_norm": 0.2958831787109375, | |
| "learning_rate": 7.558936079746776e-07, | |
| "loss": 0.5806, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 2.8936552180280626, | |
| "grad_norm": 0.37747666239738464, | |
| "learning_rate": 7.166028881434939e-07, | |
| "loss": 0.495, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 2.8995606368403646, | |
| "grad_norm": 0.3326238989830017, | |
| "learning_rate": 6.772334293948127e-07, | |
| "loss": 0.5034, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 2.905466055652667, | |
| "grad_norm": 7.006785869598389, | |
| "learning_rate": 6.378639706461316e-07, | |
| "loss": 0.5441, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 2.911371474464969, | |
| "grad_norm": 0.21972544491291046, | |
| "learning_rate": 5.984945118974505e-07, | |
| "loss": 0.5684, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 2.917276893277271, | |
| "grad_norm": 5.747308254241943, | |
| "learning_rate": 5.591250531487694e-07, | |
| "loss": 0.5395, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 2.9231823120895735, | |
| "grad_norm": 0.17187917232513428, | |
| "learning_rate": 5.197555944000882e-07, | |
| "loss": 0.5133, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 2.9290877309018755, | |
| "grad_norm": 119.28266906738281, | |
| "learning_rate": 4.803861356514071e-07, | |
| "loss": 0.4941, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 2.9349931497141775, | |
| "grad_norm": 6.082529544830322, | |
| "learning_rate": 4.4109541582022334e-07, | |
| "loss": 0.6088, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 2.94089856852648, | |
| "grad_norm": 0.683964192867279, | |
| "learning_rate": 4.017259570715422e-07, | |
| "loss": 0.5156, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 2.946803987338782, | |
| "grad_norm": 7.52407693862915, | |
| "learning_rate": 3.6235649832286103e-07, | |
| "loss": 0.4735, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 2.9527094061510843, | |
| "grad_norm": 66.87844848632812, | |
| "learning_rate": 3.2298703957417993e-07, | |
| "loss": 0.5418, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 2.9586148249633863, | |
| "grad_norm": 21.71000862121582, | |
| "learning_rate": 2.8361758082549883e-07, | |
| "loss": 0.5636, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 2.964520243775689, | |
| "grad_norm": 0.41531023383140564, | |
| "learning_rate": 2.442481220768177e-07, | |
| "loss": 0.5394, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 2.970425662587991, | |
| "grad_norm": 10.309664726257324, | |
| "learning_rate": 2.0487866332813658e-07, | |
| "loss": 0.5248, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 2.976331081400293, | |
| "grad_norm": 10.109683990478516, | |
| "learning_rate": 1.6566668241445019e-07, | |
| "loss": 0.5491, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 2.9822365002125952, | |
| "grad_norm": 12.154350280761719, | |
| "learning_rate": 1.2629722366576906e-07, | |
| "loss": 0.5542, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 2.9881419190248972, | |
| "grad_norm": 22.466415405273438, | |
| "learning_rate": 8.692776491708793e-08, | |
| "loss": 0.5347, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 2.9940473378371992, | |
| "grad_norm": 0.20442518591880798, | |
| "learning_rate": 4.7558306168406806e-08, | |
| "loss": 0.5602, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 2.9999527566495017, | |
| "grad_norm": 0.14298155903816223, | |
| "learning_rate": 8.188847419725674e-09, | |
| "loss": 0.5458, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_accuracy": 0.4869495382144291, | |
| "eval_f1": 0.51451578724306, | |
| "eval_loss": 0.713314414024353, | |
| "eval_roc_auc": 0.6757688508976673, | |
| "eval_runtime": 112.702, | |
| "eval_samples_per_second": 66.29, | |
| "eval_steps_per_second": 66.29, | |
| "step": 254004 | |
| } | |
| ], | |
| "logging_steps": 500, | |
| "max_steps": 254004, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.6862505663566643e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |