| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 1.0, | |
| "eval_steps": 0, | |
| "global_step": 935, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0010695187165775401, | |
| "grad_norm": 0.6366559863090515, | |
| "learning_rate": 1e-05, | |
| "loss": 0.4312, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0021390374331550803, | |
| "grad_norm": 0.4608285427093506, | |
| "learning_rate": 9.989304812834224e-06, | |
| "loss": 0.4195, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.0032085561497326204, | |
| "grad_norm": 0.6134791374206543, | |
| "learning_rate": 9.97860962566845e-06, | |
| "loss": 0.4944, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.0042780748663101605, | |
| "grad_norm": 0.8539131879806519, | |
| "learning_rate": 9.967914438502675e-06, | |
| "loss": 0.4361, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.0053475935828877, | |
| "grad_norm": 0.6162527203559875, | |
| "learning_rate": 9.957219251336899e-06, | |
| "loss": 0.3507, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.006417112299465241, | |
| "grad_norm": 0.6490266919136047, | |
| "learning_rate": 9.946524064171124e-06, | |
| "loss": 0.4654, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.0074866310160427805, | |
| "grad_norm": 0.7485705018043518, | |
| "learning_rate": 9.935828877005348e-06, | |
| "loss": 0.5776, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.008556149732620321, | |
| "grad_norm": 0.4976222813129425, | |
| "learning_rate": 9.925133689839573e-06, | |
| "loss": 0.4793, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.009625668449197862, | |
| "grad_norm": 0.7200250625610352, | |
| "learning_rate": 9.914438502673798e-06, | |
| "loss": 0.5027, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.0106951871657754, | |
| "grad_norm": 0.8122205138206482, | |
| "learning_rate": 9.903743315508022e-06, | |
| "loss": 0.4902, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.011764705882352941, | |
| "grad_norm": 0.707146167755127, | |
| "learning_rate": 9.893048128342247e-06, | |
| "loss": 0.4822, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.012834224598930482, | |
| "grad_norm": 0.588702380657196, | |
| "learning_rate": 9.882352941176472e-06, | |
| "loss": 0.4619, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.013903743315508022, | |
| "grad_norm": 0.9360224604606628, | |
| "learning_rate": 9.871657754010696e-06, | |
| "loss": 0.395, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.014973262032085561, | |
| "grad_norm": 0.6083442568778992, | |
| "learning_rate": 9.86096256684492e-06, | |
| "loss": 0.4282, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.016042780748663103, | |
| "grad_norm": 0.8113846182823181, | |
| "learning_rate": 9.850267379679145e-06, | |
| "loss": 0.4805, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.017112299465240642, | |
| "grad_norm": 0.6021324992179871, | |
| "learning_rate": 9.83957219251337e-06, | |
| "loss": 0.5058, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.01818181818181818, | |
| "grad_norm": 0.7136564254760742, | |
| "learning_rate": 9.828877005347594e-06, | |
| "loss": 0.4188, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.019251336898395723, | |
| "grad_norm": 0.9644091725349426, | |
| "learning_rate": 9.81818181818182e-06, | |
| "loss": 0.5303, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.020320855614973262, | |
| "grad_norm": 0.7079192996025085, | |
| "learning_rate": 9.807486631016043e-06, | |
| "loss": 0.5237, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.0213903743315508, | |
| "grad_norm": 0.5324834585189819, | |
| "learning_rate": 9.796791443850268e-06, | |
| "loss": 0.3634, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.022459893048128343, | |
| "grad_norm": 0.6512548923492432, | |
| "learning_rate": 9.786096256684493e-06, | |
| "loss": 0.4765, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.023529411764705882, | |
| "grad_norm": 0.7586005926132202, | |
| "learning_rate": 9.775401069518717e-06, | |
| "loss": 0.5483, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.02459893048128342, | |
| "grad_norm": 0.7750284075737, | |
| "learning_rate": 9.764705882352942e-06, | |
| "loss": 0.4723, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.025668449197860963, | |
| "grad_norm": 0.7075735926628113, | |
| "learning_rate": 9.754010695187166e-06, | |
| "loss": 0.5195, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.026737967914438502, | |
| "grad_norm": 0.5441696643829346, | |
| "learning_rate": 9.743315508021391e-06, | |
| "loss": 0.3831, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.027807486631016044, | |
| "grad_norm": 1.3407716751098633, | |
| "learning_rate": 9.732620320855617e-06, | |
| "loss": 0.5684, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.028877005347593583, | |
| "grad_norm": 1.38676118850708, | |
| "learning_rate": 9.72192513368984e-06, | |
| "loss": 0.5142, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.029946524064171122, | |
| "grad_norm": 0.5766410231590271, | |
| "learning_rate": 9.711229946524064e-06, | |
| "loss": 0.4912, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.031016042780748664, | |
| "grad_norm": 0.6887979507446289, | |
| "learning_rate": 9.700534759358289e-06, | |
| "loss": 0.442, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.03208556149732621, | |
| "grad_norm": 0.4945356547832489, | |
| "learning_rate": 9.689839572192514e-06, | |
| "loss": 0.4676, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.033155080213903745, | |
| "grad_norm": 0.6520042419433594, | |
| "learning_rate": 9.679144385026738e-06, | |
| "loss": 0.3981, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.034224598930481284, | |
| "grad_norm": 0.6300604939460754, | |
| "learning_rate": 9.668449197860963e-06, | |
| "loss": 0.3692, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.03529411764705882, | |
| "grad_norm": 0.6626391410827637, | |
| "learning_rate": 9.657754010695189e-06, | |
| "loss": 0.5211, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.03636363636363636, | |
| "grad_norm": 0.7075443267822266, | |
| "learning_rate": 9.647058823529412e-06, | |
| "loss": 0.4722, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.0374331550802139, | |
| "grad_norm": 0.6479265689849854, | |
| "learning_rate": 9.636363636363638e-06, | |
| "loss": 0.4555, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.038502673796791446, | |
| "grad_norm": 0.7561677098274231, | |
| "learning_rate": 9.625668449197861e-06, | |
| "loss": 0.4842, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.039572192513368985, | |
| "grad_norm": 0.651950478553772, | |
| "learning_rate": 9.614973262032087e-06, | |
| "loss": 0.5071, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.040641711229946524, | |
| "grad_norm": 0.7275030612945557, | |
| "learning_rate": 9.604278074866312e-06, | |
| "loss": 0.4462, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.04171122994652406, | |
| "grad_norm": 0.6694655418395996, | |
| "learning_rate": 9.593582887700535e-06, | |
| "loss": 0.5971, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.0427807486631016, | |
| "grad_norm": 0.8619104027748108, | |
| "learning_rate": 9.582887700534759e-06, | |
| "loss": 0.5869, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.04385026737967915, | |
| "grad_norm": 0.6188225746154785, | |
| "learning_rate": 9.572192513368986e-06, | |
| "loss": 0.5312, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.044919786096256686, | |
| "grad_norm": 0.8464980125427246, | |
| "learning_rate": 9.56149732620321e-06, | |
| "loss": 0.5801, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.045989304812834225, | |
| "grad_norm": 0.5679624080657959, | |
| "learning_rate": 9.550802139037433e-06, | |
| "loss": 0.4439, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.047058823529411764, | |
| "grad_norm": 0.6634694933891296, | |
| "learning_rate": 9.540106951871659e-06, | |
| "loss": 0.4904, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.0481283422459893, | |
| "grad_norm": 0.5624789595603943, | |
| "learning_rate": 9.529411764705882e-06, | |
| "loss": 0.5176, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.04919786096256684, | |
| "grad_norm": 0.7145139575004578, | |
| "learning_rate": 9.518716577540108e-06, | |
| "loss": 0.5136, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.05026737967914439, | |
| "grad_norm": 0.7052988409996033, | |
| "learning_rate": 9.508021390374333e-06, | |
| "loss": 0.481, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.051336898395721926, | |
| "grad_norm": 0.6354258060455322, | |
| "learning_rate": 9.497326203208556e-06, | |
| "loss": 0.4184, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.052406417112299465, | |
| "grad_norm": 0.6184315085411072, | |
| "learning_rate": 9.486631016042782e-06, | |
| "loss": 0.3711, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.053475935828877004, | |
| "grad_norm": 0.6750270128250122, | |
| "learning_rate": 9.475935828877007e-06, | |
| "loss": 0.4446, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.05454545454545454, | |
| "grad_norm": 0.8037489652633667, | |
| "learning_rate": 9.46524064171123e-06, | |
| "loss": 0.4136, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.05561497326203209, | |
| "grad_norm": 0.8278997540473938, | |
| "learning_rate": 9.454545454545456e-06, | |
| "loss": 0.5232, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.05668449197860963, | |
| "grad_norm": 0.5268093943595886, | |
| "learning_rate": 9.44385026737968e-06, | |
| "loss": 0.4002, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.057754010695187166, | |
| "grad_norm": 1.2410372495651245, | |
| "learning_rate": 9.433155080213905e-06, | |
| "loss": 0.5577, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.058823529411764705, | |
| "grad_norm": 0.5676299333572388, | |
| "learning_rate": 9.422459893048129e-06, | |
| "loss": 0.4334, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.059893048128342244, | |
| "grad_norm": 0.9891718029975891, | |
| "learning_rate": 9.411764705882354e-06, | |
| "loss": 0.5769, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.06096256684491978, | |
| "grad_norm": 0.5137191414833069, | |
| "learning_rate": 9.401069518716577e-06, | |
| "loss": 0.4261, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.06203208556149733, | |
| "grad_norm": 0.8014167547225952, | |
| "learning_rate": 9.390374331550803e-06, | |
| "loss": 0.5629, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.06310160427807486, | |
| "grad_norm": 0.7902475595474243, | |
| "learning_rate": 9.379679144385028e-06, | |
| "loss": 0.5405, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.06417112299465241, | |
| "grad_norm": 0.9568945169448853, | |
| "learning_rate": 9.368983957219252e-06, | |
| "loss": 0.5568, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.06524064171122995, | |
| "grad_norm": 0.6621415019035339, | |
| "learning_rate": 9.358288770053477e-06, | |
| "loss": 0.551, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.06631016042780749, | |
| "grad_norm": 0.597981333732605, | |
| "learning_rate": 9.347593582887702e-06, | |
| "loss": 0.4285, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.06737967914438503, | |
| "grad_norm": 0.5961741805076599, | |
| "learning_rate": 9.336898395721926e-06, | |
| "loss": 0.4404, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.06844919786096257, | |
| "grad_norm": 0.7040389180183411, | |
| "learning_rate": 9.326203208556151e-06, | |
| "loss": 0.4544, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.06951871657754011, | |
| "grad_norm": 0.6759967803955078, | |
| "learning_rate": 9.315508021390375e-06, | |
| "loss": 0.4913, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.07058823529411765, | |
| "grad_norm": 0.6088681221008301, | |
| "learning_rate": 9.3048128342246e-06, | |
| "loss": 0.4644, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.07165775401069518, | |
| "grad_norm": 0.5489950776100159, | |
| "learning_rate": 9.294117647058824e-06, | |
| "loss": 0.4833, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.07272727272727272, | |
| "grad_norm": 0.5953671336174011, | |
| "learning_rate": 9.283422459893049e-06, | |
| "loss": 0.4205, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.07379679144385026, | |
| "grad_norm": 0.6680759787559509, | |
| "learning_rate": 9.272727272727273e-06, | |
| "loss": 0.3943, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.0748663101604278, | |
| "grad_norm": 0.5784221887588501, | |
| "learning_rate": 9.262032085561498e-06, | |
| "loss": 0.4847, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.07593582887700535, | |
| "grad_norm": 0.587465226650238, | |
| "learning_rate": 9.251336898395723e-06, | |
| "loss": 0.3806, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.07700534759358289, | |
| "grad_norm": 0.5951926112174988, | |
| "learning_rate": 9.240641711229947e-06, | |
| "loss": 0.4128, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.07807486631016043, | |
| "grad_norm": 0.5389623045921326, | |
| "learning_rate": 9.229946524064172e-06, | |
| "loss": 0.4681, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.07914438502673797, | |
| "grad_norm": 0.684956431388855, | |
| "learning_rate": 9.219251336898396e-06, | |
| "loss": 0.5342, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.08021390374331551, | |
| "grad_norm": 0.590521514415741, | |
| "learning_rate": 9.208556149732621e-06, | |
| "loss": 0.4014, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.08128342245989305, | |
| "grad_norm": 0.5421493649482727, | |
| "learning_rate": 9.197860962566846e-06, | |
| "loss": 0.3524, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.08235294117647059, | |
| "grad_norm": 0.6509561538696289, | |
| "learning_rate": 9.18716577540107e-06, | |
| "loss": 0.59, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.08342245989304813, | |
| "grad_norm": 0.4329707622528076, | |
| "learning_rate": 9.176470588235294e-06, | |
| "loss": 0.4438, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.08449197860962566, | |
| "grad_norm": 0.6363930702209473, | |
| "learning_rate": 9.16577540106952e-06, | |
| "loss": 0.4896, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.0855614973262032, | |
| "grad_norm": 0.6219555139541626, | |
| "learning_rate": 9.155080213903744e-06, | |
| "loss": 0.4474, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.08663101604278074, | |
| "grad_norm": 0.7087079286575317, | |
| "learning_rate": 9.144385026737968e-06, | |
| "loss": 0.5943, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.0877005347593583, | |
| "grad_norm": 0.7004481554031372, | |
| "learning_rate": 9.133689839572193e-06, | |
| "loss": 0.5187, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.08877005347593583, | |
| "grad_norm": 0.6082620620727539, | |
| "learning_rate": 9.122994652406418e-06, | |
| "loss": 0.55, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.08983957219251337, | |
| "grad_norm": 0.5777840614318848, | |
| "learning_rate": 9.112299465240642e-06, | |
| "loss": 0.4917, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.09090909090909091, | |
| "grad_norm": 0.5813891887664795, | |
| "learning_rate": 9.101604278074867e-06, | |
| "loss": 0.4492, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.09197860962566845, | |
| "grad_norm": 0.8648094534873962, | |
| "learning_rate": 9.090909090909091e-06, | |
| "loss": 0.4521, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.09304812834224599, | |
| "grad_norm": 0.5733294486999512, | |
| "learning_rate": 9.080213903743316e-06, | |
| "loss": 0.4834, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.09411764705882353, | |
| "grad_norm": 0.9621995687484741, | |
| "learning_rate": 9.069518716577542e-06, | |
| "loss": 0.4822, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.09518716577540107, | |
| "grad_norm": 0.766180694103241, | |
| "learning_rate": 9.058823529411765e-06, | |
| "loss": 0.5908, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.0962566844919786, | |
| "grad_norm": 0.8607532978057861, | |
| "learning_rate": 9.04812834224599e-06, | |
| "loss": 0.4777, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.09732620320855614, | |
| "grad_norm": 0.603285014629364, | |
| "learning_rate": 9.037433155080214e-06, | |
| "loss": 0.5192, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.09839572192513368, | |
| "grad_norm": 0.531568169593811, | |
| "learning_rate": 9.02673796791444e-06, | |
| "loss": 0.4388, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.09946524064171124, | |
| "grad_norm": 0.6672679781913757, | |
| "learning_rate": 9.016042780748663e-06, | |
| "loss": 0.4124, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.10053475935828877, | |
| "grad_norm": 0.45054206252098083, | |
| "learning_rate": 9.005347593582888e-06, | |
| "loss": 0.3824, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.10160427807486631, | |
| "grad_norm": 0.6651024222373962, | |
| "learning_rate": 8.994652406417112e-06, | |
| "loss": 0.4753, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.10267379679144385, | |
| "grad_norm": 0.7906668782234192, | |
| "learning_rate": 8.983957219251337e-06, | |
| "loss": 0.6038, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.10374331550802139, | |
| "grad_norm": 0.5423858165740967, | |
| "learning_rate": 8.973262032085563e-06, | |
| "loss": 0.4253, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.10481283422459893, | |
| "grad_norm": 0.8090050220489502, | |
| "learning_rate": 8.962566844919786e-06, | |
| "loss": 0.407, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.10588235294117647, | |
| "grad_norm": 0.6660706996917725, | |
| "learning_rate": 8.951871657754012e-06, | |
| "loss": 0.4835, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.10695187165775401, | |
| "grad_norm": 0.6135907769203186, | |
| "learning_rate": 8.941176470588237e-06, | |
| "loss": 0.4404, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.10802139037433155, | |
| "grad_norm": 0.8286933898925781, | |
| "learning_rate": 8.93048128342246e-06, | |
| "loss": 0.5026, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.10909090909090909, | |
| "grad_norm": 0.7944344282150269, | |
| "learning_rate": 8.919786096256686e-06, | |
| "loss": 0.6837, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.11016042780748662, | |
| "grad_norm": 0.7601520419120789, | |
| "learning_rate": 8.90909090909091e-06, | |
| "loss": 0.4103, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.11122994652406418, | |
| "grad_norm": 0.6285696029663086, | |
| "learning_rate": 8.898395721925135e-06, | |
| "loss": 0.4512, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.11229946524064172, | |
| "grad_norm": 0.6127632260322571, | |
| "learning_rate": 8.88770053475936e-06, | |
| "loss": 0.4691, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.11336898395721925, | |
| "grad_norm": 0.49660244584083557, | |
| "learning_rate": 8.877005347593584e-06, | |
| "loss": 0.5308, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.1144385026737968, | |
| "grad_norm": 0.6088698506355286, | |
| "learning_rate": 8.866310160427807e-06, | |
| "loss": 0.5598, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.11550802139037433, | |
| "grad_norm": 0.5381092429161072, | |
| "learning_rate": 8.855614973262033e-06, | |
| "loss": 0.4791, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.11657754010695187, | |
| "grad_norm": 0.5587130188941956, | |
| "learning_rate": 8.844919786096258e-06, | |
| "loss": 0.4545, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.11764705882352941, | |
| "grad_norm": 0.6570132970809937, | |
| "learning_rate": 8.834224598930481e-06, | |
| "loss": 0.5769, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.11871657754010695, | |
| "grad_norm": 1.0005203485488892, | |
| "learning_rate": 8.823529411764707e-06, | |
| "loss": 0.5977, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.11978609625668449, | |
| "grad_norm": 0.5949186682701111, | |
| "learning_rate": 8.81283422459893e-06, | |
| "loss": 0.3734, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.12085561497326203, | |
| "grad_norm": 0.5384220480918884, | |
| "learning_rate": 8.802139037433156e-06, | |
| "loss": 0.5537, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.12192513368983957, | |
| "grad_norm": 0.6424034237861633, | |
| "learning_rate": 8.791443850267381e-06, | |
| "loss": 0.4357, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.12299465240641712, | |
| "grad_norm": 0.6371397972106934, | |
| "learning_rate": 8.780748663101605e-06, | |
| "loss": 0.5541, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.12406417112299466, | |
| "grad_norm": 0.6941685080528259, | |
| "learning_rate": 8.77005347593583e-06, | |
| "loss": 0.4628, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.12513368983957218, | |
| "grad_norm": 0.7181639075279236, | |
| "learning_rate": 8.759358288770055e-06, | |
| "loss": 0.4739, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.12620320855614972, | |
| "grad_norm": 0.5109501481056213, | |
| "learning_rate": 8.748663101604279e-06, | |
| "loss": 0.5632, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.12727272727272726, | |
| "grad_norm": 0.8353314399719238, | |
| "learning_rate": 8.737967914438502e-06, | |
| "loss": 0.5408, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.12834224598930483, | |
| "grad_norm": 0.5425900816917419, | |
| "learning_rate": 8.727272727272728e-06, | |
| "loss": 0.3225, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.12941176470588237, | |
| "grad_norm": 0.566400408744812, | |
| "learning_rate": 8.716577540106953e-06, | |
| "loss": 0.4841, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.1304812834224599, | |
| "grad_norm": 0.5954400897026062, | |
| "learning_rate": 8.705882352941177e-06, | |
| "loss": 0.4672, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.13155080213903744, | |
| "grad_norm": 0.7826038599014282, | |
| "learning_rate": 8.695187165775402e-06, | |
| "loss": 0.5764, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.13262032085561498, | |
| "grad_norm": 0.5208374857902527, | |
| "learning_rate": 8.684491978609626e-06, | |
| "loss": 0.5549, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.13368983957219252, | |
| "grad_norm": 0.7175931930541992, | |
| "learning_rate": 8.673796791443851e-06, | |
| "loss": 0.547, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.13475935828877006, | |
| "grad_norm": 0.6294770240783691, | |
| "learning_rate": 8.663101604278076e-06, | |
| "loss": 0.4467, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.1358288770053476, | |
| "grad_norm": 0.4822896718978882, | |
| "learning_rate": 8.6524064171123e-06, | |
| "loss": 0.3759, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.13689839572192514, | |
| "grad_norm": 0.6432593464851379, | |
| "learning_rate": 8.641711229946525e-06, | |
| "loss": 0.4974, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.13796791443850268, | |
| "grad_norm": 0.7343258261680603, | |
| "learning_rate": 8.63101604278075e-06, | |
| "loss": 0.4225, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.13903743315508021, | |
| "grad_norm": 0.6687297224998474, | |
| "learning_rate": 8.620320855614974e-06, | |
| "loss": 0.5686, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.14010695187165775, | |
| "grad_norm": 0.8913871049880981, | |
| "learning_rate": 8.609625668449198e-06, | |
| "loss": 0.5145, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.1411764705882353, | |
| "grad_norm": 0.7621857523918152, | |
| "learning_rate": 8.598930481283423e-06, | |
| "loss": 0.4971, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.14224598930481283, | |
| "grad_norm": 0.5073545575141907, | |
| "learning_rate": 8.588235294117647e-06, | |
| "loss": 0.4607, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.14331550802139037, | |
| "grad_norm": 0.6622806787490845, | |
| "learning_rate": 8.577540106951872e-06, | |
| "loss": 0.4918, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.1443850267379679, | |
| "grad_norm": 0.6523916125297546, | |
| "learning_rate": 8.566844919786097e-06, | |
| "loss": 0.458, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.14545454545454545, | |
| "grad_norm": 0.5197381377220154, | |
| "learning_rate": 8.556149732620321e-06, | |
| "loss": 0.5122, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.146524064171123, | |
| "grad_norm": 0.568813145160675, | |
| "learning_rate": 8.545454545454546e-06, | |
| "loss": 0.3033, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.14759358288770053, | |
| "grad_norm": 0.6391533017158508, | |
| "learning_rate": 8.534759358288771e-06, | |
| "loss": 0.5431, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.14866310160427806, | |
| "grad_norm": 0.7385016679763794, | |
| "learning_rate": 8.524064171122995e-06, | |
| "loss": 0.4428, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.1497326203208556, | |
| "grad_norm": 0.7838972210884094, | |
| "learning_rate": 8.51336898395722e-06, | |
| "loss": 0.5016, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.15080213903743314, | |
| "grad_norm": 0.564905047416687, | |
| "learning_rate": 8.502673796791444e-06, | |
| "loss": 0.4441, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.1518716577540107, | |
| "grad_norm": 0.6710098385810852, | |
| "learning_rate": 8.49197860962567e-06, | |
| "loss": 0.4722, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.15294117647058825, | |
| "grad_norm": 0.6159948706626892, | |
| "learning_rate": 8.481283422459895e-06, | |
| "loss": 0.6224, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.15401069518716579, | |
| "grad_norm": 0.8119845390319824, | |
| "learning_rate": 8.470588235294118e-06, | |
| "loss": 0.444, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.15508021390374332, | |
| "grad_norm": 0.6023182272911072, | |
| "learning_rate": 8.459893048128342e-06, | |
| "loss": 0.5095, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.15614973262032086, | |
| "grad_norm": 0.605734646320343, | |
| "learning_rate": 8.449197860962567e-06, | |
| "loss": 0.3677, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.1572192513368984, | |
| "grad_norm": 0.5443623065948486, | |
| "learning_rate": 8.438502673796792e-06, | |
| "loss": 0.4093, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.15828877005347594, | |
| "grad_norm": 0.6605389714241028, | |
| "learning_rate": 8.427807486631016e-06, | |
| "loss": 0.4866, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.15935828877005348, | |
| "grad_norm": 0.7168911695480347, | |
| "learning_rate": 8.417112299465241e-06, | |
| "loss": 0.5128, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.16042780748663102, | |
| "grad_norm": 0.6364812254905701, | |
| "learning_rate": 8.406417112299467e-06, | |
| "loss": 0.5079, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.16149732620320856, | |
| "grad_norm": 0.5908712148666382, | |
| "learning_rate": 8.39572192513369e-06, | |
| "loss": 0.3822, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.1625668449197861, | |
| "grad_norm": 0.6003589630126953, | |
| "learning_rate": 8.385026737967916e-06, | |
| "loss": 0.4911, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.16363636363636364, | |
| "grad_norm": 0.5347418189048767, | |
| "learning_rate": 8.37433155080214e-06, | |
| "loss": 0.4314, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.16470588235294117, | |
| "grad_norm": 0.691434919834137, | |
| "learning_rate": 8.363636363636365e-06, | |
| "loss": 0.4669, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.1657754010695187, | |
| "grad_norm": 0.6481773853302002, | |
| "learning_rate": 8.35294117647059e-06, | |
| "loss": 0.3829, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.16684491978609625, | |
| "grad_norm": 0.5532310605049133, | |
| "learning_rate": 8.342245989304813e-06, | |
| "loss": 0.5369, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.1679144385026738, | |
| "grad_norm": 0.5456653237342834, | |
| "learning_rate": 8.331550802139037e-06, | |
| "loss": 0.4708, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.16898395721925133, | |
| "grad_norm": 0.6962786912918091, | |
| "learning_rate": 8.320855614973262e-06, | |
| "loss": 0.6964, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.17005347593582887, | |
| "grad_norm": 0.6660104990005493, | |
| "learning_rate": 8.310160427807488e-06, | |
| "loss": 0.4741, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.1711229946524064, | |
| "grad_norm": 0.5634502172470093, | |
| "learning_rate": 8.299465240641711e-06, | |
| "loss": 0.5159, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.17219251336898395, | |
| "grad_norm": 0.5761899352073669, | |
| "learning_rate": 8.288770053475937e-06, | |
| "loss": 0.4679, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.17326203208556148, | |
| "grad_norm": 0.5285319089889526, | |
| "learning_rate": 8.27807486631016e-06, | |
| "loss": 0.4507, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.17433155080213902, | |
| "grad_norm": 0.5991924405097961, | |
| "learning_rate": 8.267379679144386e-06, | |
| "loss": 0.4408, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.1754010695187166, | |
| "grad_norm": 0.7946671843528748, | |
| "learning_rate": 8.256684491978611e-06, | |
| "loss": 0.5938, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.17647058823529413, | |
| "grad_norm": 0.5594061613082886, | |
| "learning_rate": 8.245989304812834e-06, | |
| "loss": 0.4395, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.17754010695187167, | |
| "grad_norm": 0.755118191242218, | |
| "learning_rate": 8.23529411764706e-06, | |
| "loss": 0.5176, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.1786096256684492, | |
| "grad_norm": 0.6518577933311462, | |
| "learning_rate": 8.224598930481285e-06, | |
| "loss": 0.3701, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.17967914438502675, | |
| "grad_norm": 0.7031956315040588, | |
| "learning_rate": 8.213903743315509e-06, | |
| "loss": 0.4626, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.18074866310160428, | |
| "grad_norm": 0.6160303354263306, | |
| "learning_rate": 8.203208556149734e-06, | |
| "loss": 0.534, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.18181818181818182, | |
| "grad_norm": 0.6890814304351807, | |
| "learning_rate": 8.192513368983958e-06, | |
| "loss": 0.588, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.18288770053475936, | |
| "grad_norm": 0.8038296699523926, | |
| "learning_rate": 8.181818181818183e-06, | |
| "loss": 0.6581, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.1839572192513369, | |
| "grad_norm": 0.7309796214103699, | |
| "learning_rate": 8.171122994652407e-06, | |
| "loss": 0.5581, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.18502673796791444, | |
| "grad_norm": 0.5128208994865417, | |
| "learning_rate": 8.160427807486632e-06, | |
| "loss": 0.6139, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.18609625668449198, | |
| "grad_norm": 0.6201470494270325, | |
| "learning_rate": 8.149732620320855e-06, | |
| "loss": 0.3605, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.18716577540106952, | |
| "grad_norm": 0.7024129033088684, | |
| "learning_rate": 8.13903743315508e-06, | |
| "loss": 0.5726, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.18823529411764706, | |
| "grad_norm": 0.6024894118309021, | |
| "learning_rate": 8.128342245989306e-06, | |
| "loss": 0.391, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.1893048128342246, | |
| "grad_norm": 0.6784148812294006, | |
| "learning_rate": 8.11764705882353e-06, | |
| "loss": 0.4688, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.19037433155080213, | |
| "grad_norm": 0.5690904855728149, | |
| "learning_rate": 8.106951871657755e-06, | |
| "loss": 0.4312, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.19144385026737967, | |
| "grad_norm": 0.4456072151660919, | |
| "learning_rate": 8.096256684491979e-06, | |
| "loss": 0.3891, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.1925133689839572, | |
| "grad_norm": 0.6868999600410461, | |
| "learning_rate": 8.085561497326204e-06, | |
| "loss": 0.6162, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.19358288770053475, | |
| "grad_norm": 0.6011132597923279, | |
| "learning_rate": 8.07486631016043e-06, | |
| "loss": 0.4876, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.1946524064171123, | |
| "grad_norm": 0.5779730081558228, | |
| "learning_rate": 8.064171122994653e-06, | |
| "loss": 0.553, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.19572192513368983, | |
| "grad_norm": 0.49792301654815674, | |
| "learning_rate": 8.053475935828876e-06, | |
| "loss": 0.4309, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.19679144385026737, | |
| "grad_norm": 0.7544685006141663, | |
| "learning_rate": 8.042780748663103e-06, | |
| "loss": 0.4313, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.19786096256684493, | |
| "grad_norm": 0.5724061727523804, | |
| "learning_rate": 8.032085561497327e-06, | |
| "loss": 0.5095, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.19893048128342247, | |
| "grad_norm": 0.6025895476341248, | |
| "learning_rate": 8.02139037433155e-06, | |
| "loss": 0.4907, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.6024736762046814, | |
| "learning_rate": 8.010695187165776e-06, | |
| "loss": 0.5381, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.20106951871657755, | |
| "grad_norm": 0.6913965344429016, | |
| "learning_rate": 8.000000000000001e-06, | |
| "loss": 0.3748, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.2021390374331551, | |
| "grad_norm": 0.4882488548755646, | |
| "learning_rate": 7.989304812834225e-06, | |
| "loss": 0.343, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.20320855614973263, | |
| "grad_norm": 0.6679972410202026, | |
| "learning_rate": 7.97860962566845e-06, | |
| "loss": 0.4423, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.20427807486631017, | |
| "grad_norm": 0.5501365065574646, | |
| "learning_rate": 7.967914438502674e-06, | |
| "loss": 0.5905, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.2053475935828877, | |
| "grad_norm": 0.6585186123847961, | |
| "learning_rate": 7.9572192513369e-06, | |
| "loss": 0.411, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.20641711229946524, | |
| "grad_norm": 0.7740308046340942, | |
| "learning_rate": 7.946524064171124e-06, | |
| "loss": 0.3687, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.20748663101604278, | |
| "grad_norm": 0.6177687644958496, | |
| "learning_rate": 7.935828877005348e-06, | |
| "loss": 0.4806, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.20855614973262032, | |
| "grad_norm": 0.6582660675048828, | |
| "learning_rate": 7.925133689839572e-06, | |
| "loss": 0.4186, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.20962566844919786, | |
| "grad_norm": 0.8725008964538574, | |
| "learning_rate": 7.914438502673799e-06, | |
| "loss": 0.5795, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.2106951871657754, | |
| "grad_norm": 1.3974719047546387, | |
| "learning_rate": 7.903743315508022e-06, | |
| "loss": 0.4838, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.21176470588235294, | |
| "grad_norm": 0.5945003628730774, | |
| "learning_rate": 7.893048128342246e-06, | |
| "loss": 0.4325, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.21283422459893048, | |
| "grad_norm": 0.6746075749397278, | |
| "learning_rate": 7.882352941176471e-06, | |
| "loss": 0.5138, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.21390374331550802, | |
| "grad_norm": 0.6204707622528076, | |
| "learning_rate": 7.871657754010695e-06, | |
| "loss": 0.548, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.21497326203208555, | |
| "grad_norm": 0.7009350657463074, | |
| "learning_rate": 7.86096256684492e-06, | |
| "loss": 0.4562, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.2160427807486631, | |
| "grad_norm": 0.575414776802063, | |
| "learning_rate": 7.850267379679145e-06, | |
| "loss": 0.4624, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.21711229946524063, | |
| "grad_norm": 0.7425415515899658, | |
| "learning_rate": 7.839572192513369e-06, | |
| "loss": 0.4532, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.21818181818181817, | |
| "grad_norm": 0.6293154358863831, | |
| "learning_rate": 7.828877005347594e-06, | |
| "loss": 0.4418, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.2192513368983957, | |
| "grad_norm": 0.5798196196556091, | |
| "learning_rate": 7.81818181818182e-06, | |
| "loss": 0.4183, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.22032085561497325, | |
| "grad_norm": 0.6445826888084412, | |
| "learning_rate": 7.807486631016043e-06, | |
| "loss": 0.5934, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.22139037433155082, | |
| "grad_norm": 0.653247058391571, | |
| "learning_rate": 7.796791443850269e-06, | |
| "loss": 0.485, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.22245989304812835, | |
| "grad_norm": 0.7346906661987305, | |
| "learning_rate": 7.786096256684492e-06, | |
| "loss": 0.5367, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.2235294117647059, | |
| "grad_norm": 0.6756159067153931, | |
| "learning_rate": 7.775401069518718e-06, | |
| "loss": 0.4428, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.22459893048128343, | |
| "grad_norm": 0.7097551822662354, | |
| "learning_rate": 7.764705882352941e-06, | |
| "loss": 0.5358, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.22566844919786097, | |
| "grad_norm": 0.6353034973144531, | |
| "learning_rate": 7.754010695187166e-06, | |
| "loss": 0.463, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.2267379679144385, | |
| "grad_norm": 0.6333077549934387, | |
| "learning_rate": 7.74331550802139e-06, | |
| "loss": 0.3989, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.22780748663101605, | |
| "grad_norm": 0.7387973666191101, | |
| "learning_rate": 7.732620320855615e-06, | |
| "loss": 0.4488, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.2288770053475936, | |
| "grad_norm": 0.695409893989563, | |
| "learning_rate": 7.72192513368984e-06, | |
| "loss": 0.4005, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.22994652406417113, | |
| "grad_norm": 0.5743019580841064, | |
| "learning_rate": 7.711229946524064e-06, | |
| "loss": 0.5459, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.23101604278074866, | |
| "grad_norm": 0.5673878788948059, | |
| "learning_rate": 7.70053475935829e-06, | |
| "loss": 0.5189, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.2320855614973262, | |
| "grad_norm": 0.6167505979537964, | |
| "learning_rate": 7.689839572192515e-06, | |
| "loss": 0.5378, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.23315508021390374, | |
| "grad_norm": 0.6391251087188721, | |
| "learning_rate": 7.679144385026739e-06, | |
| "loss": 0.6179, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.23422459893048128, | |
| "grad_norm": 0.5857836008071899, | |
| "learning_rate": 7.668449197860964e-06, | |
| "loss": 0.5712, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.23529411764705882, | |
| "grad_norm": 0.4999634325504303, | |
| "learning_rate": 7.657754010695187e-06, | |
| "loss": 0.3968, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.23636363636363636, | |
| "grad_norm": 0.8106687664985657, | |
| "learning_rate": 7.647058823529411e-06, | |
| "loss": 0.4262, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.2374331550802139, | |
| "grad_norm": 0.4779665470123291, | |
| "learning_rate": 7.636363636363638e-06, | |
| "loss": 0.3528, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.23850267379679144, | |
| "grad_norm": 0.677229106426239, | |
| "learning_rate": 7.625668449197862e-06, | |
| "loss": 0.6505, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.23957219251336898, | |
| "grad_norm": 0.6161805391311646, | |
| "learning_rate": 7.614973262032086e-06, | |
| "loss": 0.4189, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.24064171122994651, | |
| "grad_norm": 0.7654128074645996, | |
| "learning_rate": 7.604278074866311e-06, | |
| "loss": 0.4661, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.24171122994652405, | |
| "grad_norm": 0.5132816433906555, | |
| "learning_rate": 7.593582887700536e-06, | |
| "loss": 0.4207, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.2427807486631016, | |
| "grad_norm": 0.7365961670875549, | |
| "learning_rate": 7.58288770053476e-06, | |
| "loss": 0.6396, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.24385026737967913, | |
| "grad_norm": 0.7593777179718018, | |
| "learning_rate": 7.572192513368984e-06, | |
| "loss": 0.5485, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.2449197860962567, | |
| "grad_norm": 0.54847651720047, | |
| "learning_rate": 7.5614973262032085e-06, | |
| "loss": 0.3845, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.24598930481283424, | |
| "grad_norm": 0.545281171798706, | |
| "learning_rate": 7.550802139037434e-06, | |
| "loss": 0.3984, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.24705882352941178, | |
| "grad_norm": 0.7247041463851929, | |
| "learning_rate": 7.540106951871658e-06, | |
| "loss": 0.5634, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.24812834224598931, | |
| "grad_norm": 0.7590208053588867, | |
| "learning_rate": 7.529411764705883e-06, | |
| "loss": 0.5953, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.24919786096256685, | |
| "grad_norm": 0.6960892677307129, | |
| "learning_rate": 7.518716577540107e-06, | |
| "loss": 0.7098, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.25026737967914436, | |
| "grad_norm": 0.6028234958648682, | |
| "learning_rate": 7.5080213903743325e-06, | |
| "loss": 0.4663, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.25133689839572193, | |
| "grad_norm": 0.6062140464782715, | |
| "learning_rate": 7.497326203208557e-06, | |
| "loss": 0.5257, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.25240641711229944, | |
| "grad_norm": 0.5987381935119629, | |
| "learning_rate": 7.486631016042781e-06, | |
| "loss": 0.4501, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.253475935828877, | |
| "grad_norm": 0.6203760504722595, | |
| "learning_rate": 7.475935828877006e-06, | |
| "loss": 0.483, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.2545454545454545, | |
| "grad_norm": 0.694579541683197, | |
| "learning_rate": 7.465240641711231e-06, | |
| "loss": 0.5285, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.2556149732620321, | |
| "grad_norm": 0.6250628232955933, | |
| "learning_rate": 7.454545454545456e-06, | |
| "loss": 0.4216, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.25668449197860965, | |
| "grad_norm": 0.6472836136817932, | |
| "learning_rate": 7.44385026737968e-06, | |
| "loss": 0.5479, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.25775401069518716, | |
| "grad_norm": 0.8919947147369385, | |
| "learning_rate": 7.433155080213904e-06, | |
| "loss": 0.5599, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.25882352941176473, | |
| "grad_norm": 0.5914128422737122, | |
| "learning_rate": 7.422459893048128e-06, | |
| "loss": 0.4878, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.25989304812834224, | |
| "grad_norm": 0.700639545917511, | |
| "learning_rate": 7.4117647058823535e-06, | |
| "loss": 0.5227, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.2609625668449198, | |
| "grad_norm": 0.7693853378295898, | |
| "learning_rate": 7.401069518716578e-06, | |
| "loss": 0.5064, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.2620320855614973, | |
| "grad_norm": 0.6256438493728638, | |
| "learning_rate": 7.390374331550802e-06, | |
| "loss": 0.4444, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.2631016042780749, | |
| "grad_norm": 0.563152551651001, | |
| "learning_rate": 7.379679144385027e-06, | |
| "loss": 0.4482, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.2641711229946524, | |
| "grad_norm": 0.9797747731208801, | |
| "learning_rate": 7.368983957219252e-06, | |
| "loss": 0.643, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.26524064171122996, | |
| "grad_norm": 0.5842085480690002, | |
| "learning_rate": 7.358288770053477e-06, | |
| "loss": 0.4595, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.2663101604278075, | |
| "grad_norm": 0.5596362948417664, | |
| "learning_rate": 7.347593582887701e-06, | |
| "loss": 0.4572, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.26737967914438504, | |
| "grad_norm": 0.5629756450653076, | |
| "learning_rate": 7.3368983957219256e-06, | |
| "loss": 0.4436, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.26844919786096255, | |
| "grad_norm": 0.680733859539032, | |
| "learning_rate": 7.326203208556151e-06, | |
| "loss": 0.5429, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.2695187165775401, | |
| "grad_norm": 0.7168539762496948, | |
| "learning_rate": 7.315508021390375e-06, | |
| "loss": 0.5728, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.27058823529411763, | |
| "grad_norm": 0.5405863523483276, | |
| "learning_rate": 7.3048128342246e-06, | |
| "loss": 0.4395, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.2716577540106952, | |
| "grad_norm": 0.549039363861084, | |
| "learning_rate": 7.294117647058823e-06, | |
| "loss": 0.3718, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.2727272727272727, | |
| "grad_norm": 0.8648082613945007, | |
| "learning_rate": 7.2834224598930496e-06, | |
| "loss": 0.3989, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.2737967914438503, | |
| "grad_norm": 0.540567934513092, | |
| "learning_rate": 7.272727272727273e-06, | |
| "loss": 0.6245, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.2748663101604278, | |
| "grad_norm": 0.5931840538978577, | |
| "learning_rate": 7.262032085561498e-06, | |
| "loss": 0.4086, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.27593582887700535, | |
| "grad_norm": 0.6359665989875793, | |
| "learning_rate": 7.251336898395722e-06, | |
| "loss": 0.3767, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.27700534759358286, | |
| "grad_norm": 0.9781519174575806, | |
| "learning_rate": 7.240641711229947e-06, | |
| "loss": 0.5089, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.27807486631016043, | |
| "grad_norm": 0.6503910422325134, | |
| "learning_rate": 7.229946524064172e-06, | |
| "loss": 0.4789, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.279144385026738, | |
| "grad_norm": 0.6399834752082825, | |
| "learning_rate": 7.219251336898396e-06, | |
| "loss": 0.5347, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.2802139037433155, | |
| "grad_norm": 0.5321598649024963, | |
| "learning_rate": 7.208556149732621e-06, | |
| "loss": 0.5581, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.2812834224598931, | |
| "grad_norm": 0.6039249897003174, | |
| "learning_rate": 7.197860962566845e-06, | |
| "loss": 0.4834, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.2823529411764706, | |
| "grad_norm": 0.5686516761779785, | |
| "learning_rate": 7.1871657754010706e-06, | |
| "loss": 0.4518, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.28342245989304815, | |
| "grad_norm": 0.6458876729011536, | |
| "learning_rate": 7.176470588235295e-06, | |
| "loss": 0.5539, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.28449197860962566, | |
| "grad_norm": 0.7126064896583557, | |
| "learning_rate": 7.1657754010695195e-06, | |
| "loss": 0.5956, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.28556149732620323, | |
| "grad_norm": 0.4839479625225067, | |
| "learning_rate": 7.155080213903743e-06, | |
| "loss": 0.4839, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.28663101604278074, | |
| "grad_norm": 0.5924274325370789, | |
| "learning_rate": 7.144385026737969e-06, | |
| "loss": 0.5022, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.2877005347593583, | |
| "grad_norm": 0.571283221244812, | |
| "learning_rate": 7.133689839572193e-06, | |
| "loss": 0.4315, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.2887700534759358, | |
| "grad_norm": 0.5989797115325928, | |
| "learning_rate": 7.122994652406417e-06, | |
| "loss": 0.4308, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.2898395721925134, | |
| "grad_norm": 0.6898683309555054, | |
| "learning_rate": 7.112299465240642e-06, | |
| "loss": 0.6749, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.2909090909090909, | |
| "grad_norm": 0.6592080593109131, | |
| "learning_rate": 7.101604278074867e-06, | |
| "loss": 0.5044, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.29197860962566846, | |
| "grad_norm": 0.6462112665176392, | |
| "learning_rate": 7.0909090909090916e-06, | |
| "loss": 0.534, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.293048128342246, | |
| "grad_norm": 0.8627530932426453, | |
| "learning_rate": 7.080213903743316e-06, | |
| "loss": 0.4417, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.29411764705882354, | |
| "grad_norm": 0.5488596558570862, | |
| "learning_rate": 7.0695187165775405e-06, | |
| "loss": 0.4305, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.29518716577540105, | |
| "grad_norm": 0.617226779460907, | |
| "learning_rate": 7.058823529411766e-06, | |
| "loss": 0.3903, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.2962566844919786, | |
| "grad_norm": 0.47178295254707336, | |
| "learning_rate": 7.04812834224599e-06, | |
| "loss": 0.4496, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.29732620320855613, | |
| "grad_norm": 0.69908207654953, | |
| "learning_rate": 7.037433155080215e-06, | |
| "loss": 0.5284, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.2983957219251337, | |
| "grad_norm": 0.7890012860298157, | |
| "learning_rate": 7.026737967914438e-06, | |
| "loss": 0.4681, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.2994652406417112, | |
| "grad_norm": 0.5878118276596069, | |
| "learning_rate": 7.0160427807486645e-06, | |
| "loss": 0.3688, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.30053475935828877, | |
| "grad_norm": 0.796548068523407, | |
| "learning_rate": 7.005347593582889e-06, | |
| "loss": 0.4746, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.3016042780748663, | |
| "grad_norm": 0.6081482172012329, | |
| "learning_rate": 6.9946524064171125e-06, | |
| "loss": 0.4598, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.30267379679144385, | |
| "grad_norm": 0.6515517830848694, | |
| "learning_rate": 6.983957219251337e-06, | |
| "loss": 0.5258, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.3037433155080214, | |
| "grad_norm": 0.6377502083778381, | |
| "learning_rate": 6.9732620320855615e-06, | |
| "loss": 0.6074, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.3048128342245989, | |
| "grad_norm": 0.6666169166564941, | |
| "learning_rate": 6.962566844919787e-06, | |
| "loss": 0.4476, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.3058823529411765, | |
| "grad_norm": 0.6400924921035767, | |
| "learning_rate": 6.951871657754011e-06, | |
| "loss": 0.5965, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.306951871657754, | |
| "grad_norm": 0.4592711329460144, | |
| "learning_rate": 6.941176470588236e-06, | |
| "loss": 0.4714, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.30802139037433157, | |
| "grad_norm": 0.7707461714744568, | |
| "learning_rate": 6.93048128342246e-06, | |
| "loss": 0.5825, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.3090909090909091, | |
| "grad_norm": 0.5988249778747559, | |
| "learning_rate": 6.9197860962566855e-06, | |
| "loss": 0.363, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.31016042780748665, | |
| "grad_norm": 0.7031011581420898, | |
| "learning_rate": 6.90909090909091e-06, | |
| "loss": 0.4363, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.31122994652406416, | |
| "grad_norm": 0.690811038017273, | |
| "learning_rate": 6.898395721925134e-06, | |
| "loss": 0.5329, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.3122994652406417, | |
| "grad_norm": 0.7570518255233765, | |
| "learning_rate": 6.887700534759358e-06, | |
| "loss": 0.509, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.31336898395721924, | |
| "grad_norm": 0.6185449957847595, | |
| "learning_rate": 6.877005347593584e-06, | |
| "loss": 0.4722, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.3144385026737968, | |
| "grad_norm": 0.46442198753356934, | |
| "learning_rate": 6.866310160427808e-06, | |
| "loss": 0.3704, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.3155080213903743, | |
| "grad_norm": 0.6784112453460693, | |
| "learning_rate": 6.855614973262032e-06, | |
| "loss": 0.4218, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.3165775401069519, | |
| "grad_norm": 0.5226501822471619, | |
| "learning_rate": 6.844919786096257e-06, | |
| "loss": 0.3197, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.3176470588235294, | |
| "grad_norm": 0.5758179426193237, | |
| "learning_rate": 6.834224598930482e-06, | |
| "loss": 0.4619, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.31871657754010696, | |
| "grad_norm": 0.5652340054512024, | |
| "learning_rate": 6.8235294117647065e-06, | |
| "loss": 0.4292, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.31978609625668447, | |
| "grad_norm": 0.58515465259552, | |
| "learning_rate": 6.812834224598931e-06, | |
| "loss": 0.4818, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.32085561497326204, | |
| "grad_norm": 0.5486282706260681, | |
| "learning_rate": 6.802139037433155e-06, | |
| "loss": 0.4746, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.32192513368983955, | |
| "grad_norm": 0.5772119164466858, | |
| "learning_rate": 6.791443850267381e-06, | |
| "loss": 0.4653, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.3229946524064171, | |
| "grad_norm": 1.8447701930999756, | |
| "learning_rate": 6.780748663101605e-06, | |
| "loss": 0.528, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.3240641711229946, | |
| "grad_norm": 0.5651145577430725, | |
| "learning_rate": 6.77005347593583e-06, | |
| "loss": 0.4767, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.3251336898395722, | |
| "grad_norm": 0.7518191933631897, | |
| "learning_rate": 6.759358288770054e-06, | |
| "loss": 0.3918, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.32620320855614976, | |
| "grad_norm": 0.685975193977356, | |
| "learning_rate": 6.748663101604279e-06, | |
| "loss": 0.573, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.32727272727272727, | |
| "grad_norm": 0.9086815714836121, | |
| "learning_rate": 6.737967914438504e-06, | |
| "loss": 0.5197, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.32834224598930484, | |
| "grad_norm": 0.5320605039596558, | |
| "learning_rate": 6.7272727272727275e-06, | |
| "loss": 0.494, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.32941176470588235, | |
| "grad_norm": 0.538621723651886, | |
| "learning_rate": 6.716577540106952e-06, | |
| "loss": 0.4894, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.3304812834224599, | |
| "grad_norm": 0.6562920212745667, | |
| "learning_rate": 6.705882352941176e-06, | |
| "loss": 0.3813, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.3315508021390374, | |
| "grad_norm": 0.5830225348472595, | |
| "learning_rate": 6.695187165775402e-06, | |
| "loss": 0.3865, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.332620320855615, | |
| "grad_norm": 0.6601799130439758, | |
| "learning_rate": 6.684491978609626e-06, | |
| "loss": 0.5243, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.3336898395721925, | |
| "grad_norm": 0.9210048317909241, | |
| "learning_rate": 6.673796791443851e-06, | |
| "loss": 0.4959, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.33475935828877007, | |
| "grad_norm": 0.8686543107032776, | |
| "learning_rate": 6.663101604278075e-06, | |
| "loss": 0.5762, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.3358288770053476, | |
| "grad_norm": 0.6231438517570496, | |
| "learning_rate": 6.6524064171123e-06, | |
| "loss": 0.5255, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.33689839572192515, | |
| "grad_norm": 0.6383395195007324, | |
| "learning_rate": 6.641711229946525e-06, | |
| "loss": 0.4719, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.33796791443850266, | |
| "grad_norm": 0.6856157779693604, | |
| "learning_rate": 6.631016042780749e-06, | |
| "loss": 0.5767, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.3390374331550802, | |
| "grad_norm": 1.0427656173706055, | |
| "learning_rate": 6.620320855614974e-06, | |
| "loss": 0.4956, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.34010695187165774, | |
| "grad_norm": 0.6919543743133545, | |
| "learning_rate": 6.609625668449199e-06, | |
| "loss": 0.5916, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.3411764705882353, | |
| "grad_norm": 0.5555431842803955, | |
| "learning_rate": 6.5989304812834235e-06, | |
| "loss": 0.4071, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.3422459893048128, | |
| "grad_norm": 0.6733004450798035, | |
| "learning_rate": 6.588235294117647e-06, | |
| "loss": 0.4325, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.3433155080213904, | |
| "grad_norm": 0.7849729657173157, | |
| "learning_rate": 6.577540106951872e-06, | |
| "loss": 0.4924, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.3443850267379679, | |
| "grad_norm": 0.6547059416770935, | |
| "learning_rate": 6.566844919786097e-06, | |
| "loss": 0.4026, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.34545454545454546, | |
| "grad_norm": 0.8233744502067566, | |
| "learning_rate": 6.556149732620321e-06, | |
| "loss": 0.5977, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.34652406417112297, | |
| "grad_norm": 0.6075732707977295, | |
| "learning_rate": 6.545454545454546e-06, | |
| "loss": 0.5458, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.34759358288770054, | |
| "grad_norm": 0.7115654349327087, | |
| "learning_rate": 6.53475935828877e-06, | |
| "loss": 0.5246, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.34866310160427805, | |
| "grad_norm": 0.6240348219871521, | |
| "learning_rate": 6.524064171122996e-06, | |
| "loss": 0.505, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.3497326203208556, | |
| "grad_norm": 0.8293918967247009, | |
| "learning_rate": 6.51336898395722e-06, | |
| "loss": 0.4799, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.3508021390374332, | |
| "grad_norm": 0.5830439329147339, | |
| "learning_rate": 6.5026737967914445e-06, | |
| "loss": 0.5083, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.3518716577540107, | |
| "grad_norm": 0.5443330407142639, | |
| "learning_rate": 6.491978609625669e-06, | |
| "loss": 0.5117, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.35294117647058826, | |
| "grad_norm": 0.5359131693840027, | |
| "learning_rate": 6.4812834224598935e-06, | |
| "loss": 0.4748, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.35401069518716577, | |
| "grad_norm": 0.6293145418167114, | |
| "learning_rate": 6.470588235294119e-06, | |
| "loss": 0.4465, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.35508021390374334, | |
| "grad_norm": 0.6219552159309387, | |
| "learning_rate": 6.459893048128343e-06, | |
| "loss": 0.5242, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.35614973262032085, | |
| "grad_norm": 0.6234537959098816, | |
| "learning_rate": 6.449197860962567e-06, | |
| "loss": 0.4395, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.3572192513368984, | |
| "grad_norm": 1.0062814950942993, | |
| "learning_rate": 6.438502673796791e-06, | |
| "loss": 0.6009, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.3582887700534759, | |
| "grad_norm": 0.7384465336799622, | |
| "learning_rate": 6.427807486631017e-06, | |
| "loss": 0.5029, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.3593582887700535, | |
| "grad_norm": 0.532245397567749, | |
| "learning_rate": 6.417112299465241e-06, | |
| "loss": 0.4242, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.360427807486631, | |
| "grad_norm": 0.5797253847122192, | |
| "learning_rate": 6.4064171122994655e-06, | |
| "loss": 0.5145, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.36149732620320857, | |
| "grad_norm": 0.5592436194419861, | |
| "learning_rate": 6.39572192513369e-06, | |
| "loss": 0.3484, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.3625668449197861, | |
| "grad_norm": 0.6404398083686829, | |
| "learning_rate": 6.385026737967915e-06, | |
| "loss": 0.4522, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.36363636363636365, | |
| "grad_norm": 0.5122445225715637, | |
| "learning_rate": 6.37433155080214e-06, | |
| "loss": 0.3615, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.36470588235294116, | |
| "grad_norm": 0.583816647529602, | |
| "learning_rate": 6.363636363636364e-06, | |
| "loss": 0.4531, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.3657754010695187, | |
| "grad_norm": 0.640694797039032, | |
| "learning_rate": 6.352941176470589e-06, | |
| "loss": 0.4083, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.36684491978609624, | |
| "grad_norm": 0.5248332023620605, | |
| "learning_rate": 6.342245989304814e-06, | |
| "loss": 0.4596, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.3679144385026738, | |
| "grad_norm": 0.6204382181167603, | |
| "learning_rate": 6.3315508021390385e-06, | |
| "loss": 0.5728, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.3689839572192513, | |
| "grad_norm": 0.6899979114532471, | |
| "learning_rate": 6.320855614973262e-06, | |
| "loss": 0.5717, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.3700534759358289, | |
| "grad_norm": 0.6148127913475037, | |
| "learning_rate": 6.3101604278074865e-06, | |
| "loss": 0.408, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.3711229946524064, | |
| "grad_norm": 0.5646189451217651, | |
| "learning_rate": 6.299465240641713e-06, | |
| "loss": 0.4088, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.37219251336898396, | |
| "grad_norm": 1.1132433414459229, | |
| "learning_rate": 6.288770053475936e-06, | |
| "loss": 0.5399, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.3732620320855615, | |
| "grad_norm": 0.6076653003692627, | |
| "learning_rate": 6.278074866310161e-06, | |
| "loss": 0.5203, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.37433155080213903, | |
| "grad_norm": 0.5028043985366821, | |
| "learning_rate": 6.267379679144385e-06, | |
| "loss": 0.4573, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.3754010695187166, | |
| "grad_norm": 0.7646058201789856, | |
| "learning_rate": 6.25668449197861e-06, | |
| "loss": 0.4073, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.3764705882352941, | |
| "grad_norm": 0.7417605519294739, | |
| "learning_rate": 6.245989304812835e-06, | |
| "loss": 0.4856, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.3775401069518717, | |
| "grad_norm": 0.5920647978782654, | |
| "learning_rate": 6.2352941176470595e-06, | |
| "loss": 0.6174, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.3786096256684492, | |
| "grad_norm": 0.6706965565681458, | |
| "learning_rate": 6.224598930481284e-06, | |
| "loss": 0.4667, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.37967914438502676, | |
| "grad_norm": 0.6529900431632996, | |
| "learning_rate": 6.213903743315508e-06, | |
| "loss": 0.4825, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.38074866310160427, | |
| "grad_norm": 0.547052800655365, | |
| "learning_rate": 6.203208556149734e-06, | |
| "loss": 0.4388, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.38181818181818183, | |
| "grad_norm": 0.5850919485092163, | |
| "learning_rate": 6.192513368983958e-06, | |
| "loss": 0.5413, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.38288770053475935, | |
| "grad_norm": 0.7986252903938293, | |
| "learning_rate": 6.181818181818182e-06, | |
| "loss": 0.4004, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.3839572192513369, | |
| "grad_norm": 1.109505295753479, | |
| "learning_rate": 6.171122994652406e-06, | |
| "loss": 0.5085, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.3850267379679144, | |
| "grad_norm": 0.6533443331718445, | |
| "learning_rate": 6.1604278074866315e-06, | |
| "loss": 0.5011, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.386096256684492, | |
| "grad_norm": 0.5781113505363464, | |
| "learning_rate": 6.149732620320856e-06, | |
| "loss": 0.4831, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.3871657754010695, | |
| "grad_norm": 0.4751874804496765, | |
| "learning_rate": 6.1390374331550805e-06, | |
| "loss": 0.3976, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.38823529411764707, | |
| "grad_norm": 0.5545620918273926, | |
| "learning_rate": 6.128342245989305e-06, | |
| "loss": 0.5529, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.3893048128342246, | |
| "grad_norm": 0.8879708647727966, | |
| "learning_rate": 6.11764705882353e-06, | |
| "loss": 0.5735, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.39037433155080214, | |
| "grad_norm": 0.6707322001457214, | |
| "learning_rate": 6.106951871657755e-06, | |
| "loss": 0.5693, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.39144385026737966, | |
| "grad_norm": 0.5638059973716736, | |
| "learning_rate": 6.096256684491979e-06, | |
| "loss": 0.6027, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.3925133689839572, | |
| "grad_norm": 0.5538821220397949, | |
| "learning_rate": 6.085561497326204e-06, | |
| "loss": 0.3893, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.39358288770053473, | |
| "grad_norm": 0.6310197710990906, | |
| "learning_rate": 6.074866310160429e-06, | |
| "loss": 0.601, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.3946524064171123, | |
| "grad_norm": 0.5595915913581848, | |
| "learning_rate": 6.064171122994653e-06, | |
| "loss": 0.4418, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.39572192513368987, | |
| "grad_norm": 0.5133754014968872, | |
| "learning_rate": 6.053475935828878e-06, | |
| "loss": 0.4101, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.3967914438502674, | |
| "grad_norm": 0.7005095481872559, | |
| "learning_rate": 6.0427807486631015e-06, | |
| "loss": 0.5643, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.39786096256684494, | |
| "grad_norm": 0.7338157892227173, | |
| "learning_rate": 6.032085561497326e-06, | |
| "loss": 0.4317, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.39893048128342246, | |
| "grad_norm": 0.6107657551765442, | |
| "learning_rate": 6.021390374331551e-06, | |
| "loss": 0.4567, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.1771106719970703, | |
| "learning_rate": 6.010695187165776e-06, | |
| "loss": 0.5178, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.40106951871657753, | |
| "grad_norm": 0.7156233191490173, | |
| "learning_rate": 6e-06, | |
| "loss": 0.4846, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.4021390374331551, | |
| "grad_norm": 0.6187387704849243, | |
| "learning_rate": 5.989304812834225e-06, | |
| "loss": 0.4512, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.4032085561497326, | |
| "grad_norm": 0.5896373987197876, | |
| "learning_rate": 5.97860962566845e-06, | |
| "loss": 0.4348, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.4042780748663102, | |
| "grad_norm": 0.5796601176261902, | |
| "learning_rate": 5.967914438502674e-06, | |
| "loss": 0.5447, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.4053475935828877, | |
| "grad_norm": 0.6080489754676819, | |
| "learning_rate": 5.957219251336899e-06, | |
| "loss": 0.5379, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.40641711229946526, | |
| "grad_norm": 0.5888751745223999, | |
| "learning_rate": 5.946524064171123e-06, | |
| "loss": 0.4544, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.40748663101604277, | |
| "grad_norm": 0.5136865973472595, | |
| "learning_rate": 5.935828877005349e-06, | |
| "loss": 0.429, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.40855614973262033, | |
| "grad_norm": 0.6180446147918701, | |
| "learning_rate": 5.925133689839573e-06, | |
| "loss": 0.3396, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.40962566844919784, | |
| "grad_norm": 0.6353482007980347, | |
| "learning_rate": 5.9144385026737975e-06, | |
| "loss": 0.4503, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.4106951871657754, | |
| "grad_norm": 0.6658486127853394, | |
| "learning_rate": 5.903743315508021e-06, | |
| "loss": 0.555, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.4117647058823529, | |
| "grad_norm": 0.5664384365081787, | |
| "learning_rate": 5.893048128342247e-06, | |
| "loss": 0.4569, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.4128342245989305, | |
| "grad_norm": 0.7459322810173035, | |
| "learning_rate": 5.882352941176471e-06, | |
| "loss": 0.3339, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.413903743315508, | |
| "grad_norm": 0.646549642086029, | |
| "learning_rate": 5.871657754010695e-06, | |
| "loss": 0.4741, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.41497326203208557, | |
| "grad_norm": 0.604892373085022, | |
| "learning_rate": 5.86096256684492e-06, | |
| "loss": 0.5714, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.4160427807486631, | |
| "grad_norm": 0.6231994032859802, | |
| "learning_rate": 5.850267379679145e-06, | |
| "loss": 0.4074, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.41711229946524064, | |
| "grad_norm": 0.6247087717056274, | |
| "learning_rate": 5.83957219251337e-06, | |
| "loss": 0.5266, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.41818181818181815, | |
| "grad_norm": 0.5631265640258789, | |
| "learning_rate": 5.828877005347594e-06, | |
| "loss": 0.402, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.4192513368983957, | |
| "grad_norm": 0.6223865151405334, | |
| "learning_rate": 5.8181818181818185e-06, | |
| "loss": 0.5206, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.4203208556149733, | |
| "grad_norm": 1.2764898538589478, | |
| "learning_rate": 5.807486631016043e-06, | |
| "loss": 0.5905, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.4213903743315508, | |
| "grad_norm": 0.7152879238128662, | |
| "learning_rate": 5.796791443850268e-06, | |
| "loss": 0.4609, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.42245989304812837, | |
| "grad_norm": 0.6570881009101868, | |
| "learning_rate": 5.786096256684493e-06, | |
| "loss": 0.451, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.4235294117647059, | |
| "grad_norm": 0.5888280868530273, | |
| "learning_rate": 5.775401069518717e-06, | |
| "loss": 0.4693, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.42459893048128344, | |
| "grad_norm": 0.5495673418045044, | |
| "learning_rate": 5.764705882352941e-06, | |
| "loss": 0.4068, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.42566844919786095, | |
| "grad_norm": 0.6043405532836914, | |
| "learning_rate": 5.754010695187167e-06, | |
| "loss": 0.5144, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.4267379679144385, | |
| "grad_norm": 0.631757915019989, | |
| "learning_rate": 5.743315508021391e-06, | |
| "loss": 0.4922, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.42780748663101603, | |
| "grad_norm": 0.6288698315620422, | |
| "learning_rate": 5.732620320855615e-06, | |
| "loss": 0.3284, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.4288770053475936, | |
| "grad_norm": 0.9011797904968262, | |
| "learning_rate": 5.7219251336898395e-06, | |
| "loss": 0.4882, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.4299465240641711, | |
| "grad_norm": 0.5572121143341064, | |
| "learning_rate": 5.711229946524065e-06, | |
| "loss": 0.3964, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.4310160427807487, | |
| "grad_norm": 0.6986980438232422, | |
| "learning_rate": 5.700534759358289e-06, | |
| "loss": 0.4919, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.4320855614973262, | |
| "grad_norm": 0.565965473651886, | |
| "learning_rate": 5.689839572192514e-06, | |
| "loss": 0.3932, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.43315508021390375, | |
| "grad_norm": 0.5683202147483826, | |
| "learning_rate": 5.679144385026738e-06, | |
| "loss": 0.5549, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.43422459893048126, | |
| "grad_norm": 0.6307051777839661, | |
| "learning_rate": 5.6684491978609635e-06, | |
| "loss": 0.5081, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.43529411764705883, | |
| "grad_norm": 0.6185327768325806, | |
| "learning_rate": 5.657754010695188e-06, | |
| "loss": 0.5963, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.43636363636363634, | |
| "grad_norm": 0.7134799361228943, | |
| "learning_rate": 5.6470588235294125e-06, | |
| "loss": 0.4978, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.4374331550802139, | |
| "grad_norm": 0.86156165599823, | |
| "learning_rate": 5.636363636363636e-06, | |
| "loss": 0.5062, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.4385026737967914, | |
| "grad_norm": 0.6102122664451599, | |
| "learning_rate": 5.625668449197862e-06, | |
| "loss": 0.4606, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.439572192513369, | |
| "grad_norm": 0.618861734867096, | |
| "learning_rate": 5.614973262032086e-06, | |
| "loss": 0.5782, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.4406417112299465, | |
| "grad_norm": 0.40522196888923645, | |
| "learning_rate": 5.60427807486631e-06, | |
| "loss": 0.3231, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.44171122994652406, | |
| "grad_norm": 0.5960280299186707, | |
| "learning_rate": 5.593582887700535e-06, | |
| "loss": 0.4579, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.44278074866310163, | |
| "grad_norm": 0.535601794719696, | |
| "learning_rate": 5.58288770053476e-06, | |
| "loss": 0.4933, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.44385026737967914, | |
| "grad_norm": 0.6177385449409485, | |
| "learning_rate": 5.5721925133689845e-06, | |
| "loss": 0.4592, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.4449197860962567, | |
| "grad_norm": 0.7037109136581421, | |
| "learning_rate": 5.561497326203209e-06, | |
| "loss": 0.5141, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.4459893048128342, | |
| "grad_norm": 0.8085152506828308, | |
| "learning_rate": 5.5508021390374335e-06, | |
| "loss": 0.5396, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.4470588235294118, | |
| "grad_norm": 0.6901976466178894, | |
| "learning_rate": 5.540106951871658e-06, | |
| "loss": 0.4816, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.4481283422459893, | |
| "grad_norm": 0.664743185043335, | |
| "learning_rate": 5.529411764705883e-06, | |
| "loss": 0.5651, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.44919786096256686, | |
| "grad_norm": 0.636893093585968, | |
| "learning_rate": 5.518716577540108e-06, | |
| "loss": 0.341, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.4502673796791444, | |
| "grad_norm": 0.6073099970817566, | |
| "learning_rate": 5.508021390374332e-06, | |
| "loss": 0.5371, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.45133689839572194, | |
| "grad_norm": 0.5635761618614197, | |
| "learning_rate": 5.497326203208556e-06, | |
| "loss": 0.436, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.45240641711229945, | |
| "grad_norm": 0.516143798828125, | |
| "learning_rate": 5.486631016042782e-06, | |
| "loss": 0.4844, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.453475935828877, | |
| "grad_norm": 0.5602184534072876, | |
| "learning_rate": 5.4759358288770055e-06, | |
| "loss": 0.4687, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.45454545454545453, | |
| "grad_norm": 0.6043605208396912, | |
| "learning_rate": 5.46524064171123e-06, | |
| "loss": 0.4223, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.4556149732620321, | |
| "grad_norm": 0.6659866571426392, | |
| "learning_rate": 5.4545454545454545e-06, | |
| "loss": 0.5375, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.4566844919786096, | |
| "grad_norm": 0.614540159702301, | |
| "learning_rate": 5.44385026737968e-06, | |
| "loss": 0.4243, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.4577540106951872, | |
| "grad_norm": 0.6409124135971069, | |
| "learning_rate": 5.433155080213904e-06, | |
| "loss": 0.5356, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.4588235294117647, | |
| "grad_norm": 0.6931154727935791, | |
| "learning_rate": 5.422459893048129e-06, | |
| "loss": 0.5277, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.45989304812834225, | |
| "grad_norm": 0.575573742389679, | |
| "learning_rate": 5.411764705882353e-06, | |
| "loss": 0.3879, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.46096256684491976, | |
| "grad_norm": 0.5699415802955627, | |
| "learning_rate": 5.4010695187165785e-06, | |
| "loss": 0.4885, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.46203208556149733, | |
| "grad_norm": 0.5688080191612244, | |
| "learning_rate": 5.390374331550803e-06, | |
| "loss": 0.4474, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.46310160427807484, | |
| "grad_norm": 0.677470862865448, | |
| "learning_rate": 5.379679144385027e-06, | |
| "loss": 0.5074, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.4641711229946524, | |
| "grad_norm": 0.5586458444595337, | |
| "learning_rate": 5.368983957219252e-06, | |
| "loss": 0.4109, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.46524064171123, | |
| "grad_norm": 0.607815682888031, | |
| "learning_rate": 5.358288770053477e-06, | |
| "loss": 0.578, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.4663101604278075, | |
| "grad_norm": 0.5579704642295837, | |
| "learning_rate": 5.347593582887702e-06, | |
| "loss": 0.495, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.46737967914438505, | |
| "grad_norm": 0.5998441576957703, | |
| "learning_rate": 5.336898395721925e-06, | |
| "loss": 0.4673, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.46844919786096256, | |
| "grad_norm": 0.7160207629203796, | |
| "learning_rate": 5.32620320855615e-06, | |
| "loss": 0.4379, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.46951871657754013, | |
| "grad_norm": 0.6560221910476685, | |
| "learning_rate": 5.315508021390374e-06, | |
| "loss": 0.5049, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.47058823529411764, | |
| "grad_norm": 0.7829013466835022, | |
| "learning_rate": 5.3048128342245995e-06, | |
| "loss": 0.4088, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.4716577540106952, | |
| "grad_norm": 0.5732434988021851, | |
| "learning_rate": 5.294117647058824e-06, | |
| "loss": 0.3602, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.4727272727272727, | |
| "grad_norm": 0.7055409550666809, | |
| "learning_rate": 5.283422459893048e-06, | |
| "loss": 0.5973, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.4737967914438503, | |
| "grad_norm": 0.6110925674438477, | |
| "learning_rate": 5.272727272727273e-06, | |
| "loss": 0.5104, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.4748663101604278, | |
| "grad_norm": 0.6062266230583191, | |
| "learning_rate": 5.262032085561498e-06, | |
| "loss": 0.5375, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.47593582887700536, | |
| "grad_norm": 0.6112857460975647, | |
| "learning_rate": 5.251336898395723e-06, | |
| "loss": 0.3844, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.4770053475935829, | |
| "grad_norm": 0.5855352282524109, | |
| "learning_rate": 5.240641711229947e-06, | |
| "loss": 0.4503, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.47807486631016044, | |
| "grad_norm": 0.8795517086982727, | |
| "learning_rate": 5.2299465240641715e-06, | |
| "loss": 0.5495, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.47914438502673795, | |
| "grad_norm": 0.6265493035316467, | |
| "learning_rate": 5.219251336898397e-06, | |
| "loss": 0.565, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.4802139037433155, | |
| "grad_norm": 0.6219659447669983, | |
| "learning_rate": 5.208556149732621e-06, | |
| "loss": 0.5312, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.48128342245989303, | |
| "grad_norm": 0.5924069285392761, | |
| "learning_rate": 5.197860962566845e-06, | |
| "loss": 0.468, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.4823529411764706, | |
| "grad_norm": 0.6460242867469788, | |
| "learning_rate": 5.187165775401069e-06, | |
| "loss": 0.457, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.4834224598930481, | |
| "grad_norm": 0.5959609150886536, | |
| "learning_rate": 5.176470588235295e-06, | |
| "loss": 0.4589, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.4844919786096257, | |
| "grad_norm": 0.6619303226470947, | |
| "learning_rate": 5.165775401069519e-06, | |
| "loss": 0.5471, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.4855614973262032, | |
| "grad_norm": 0.5581579208374023, | |
| "learning_rate": 5.155080213903744e-06, | |
| "loss": 0.465, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.48663101604278075, | |
| "grad_norm": 0.6513838171958923, | |
| "learning_rate": 5.144385026737968e-06, | |
| "loss": 0.5621, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.48770053475935826, | |
| "grad_norm": 0.7988895177841187, | |
| "learning_rate": 5.133689839572193e-06, | |
| "loss": 0.4723, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.48877005347593583, | |
| "grad_norm": 0.5997406840324402, | |
| "learning_rate": 5.122994652406418e-06, | |
| "loss": 0.5598, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.4898395721925134, | |
| "grad_norm": 0.5303046107292175, | |
| "learning_rate": 5.112299465240642e-06, | |
| "loss": 0.4077, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.4909090909090909, | |
| "grad_norm": 0.514218270778656, | |
| "learning_rate": 5.101604278074867e-06, | |
| "loss": 0.477, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.4919786096256685, | |
| "grad_norm": 1.0928460359573364, | |
| "learning_rate": 5.090909090909091e-06, | |
| "loss": 0.3864, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.493048128342246, | |
| "grad_norm": 0.642814040184021, | |
| "learning_rate": 5.0802139037433165e-06, | |
| "loss": 0.489, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.49411764705882355, | |
| "grad_norm": 0.613659679889679, | |
| "learning_rate": 5.069518716577541e-06, | |
| "loss": 0.5542, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.49518716577540106, | |
| "grad_norm": 0.5729937553405762, | |
| "learning_rate": 5.058823529411765e-06, | |
| "loss": 0.4318, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.49625668449197863, | |
| "grad_norm": 0.6430114507675171, | |
| "learning_rate": 5.048128342245989e-06, | |
| "loss": 0.5106, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.49732620320855614, | |
| "grad_norm": 0.6959044933319092, | |
| "learning_rate": 5.037433155080214e-06, | |
| "loss": 0.5115, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.4983957219251337, | |
| "grad_norm": 0.6900427937507629, | |
| "learning_rate": 5.026737967914439e-06, | |
| "loss": 0.537, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.4994652406417112, | |
| "grad_norm": 0.6781838536262512, | |
| "learning_rate": 5.016042780748663e-06, | |
| "loss": 0.595, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.5005347593582887, | |
| "grad_norm": 0.5492112636566162, | |
| "learning_rate": 5.005347593582888e-06, | |
| "loss": 0.4125, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.5016042780748663, | |
| "grad_norm": 0.6954774260520935, | |
| "learning_rate": 4.994652406417112e-06, | |
| "loss": 0.55, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.5026737967914439, | |
| "grad_norm": 0.5304185152053833, | |
| "learning_rate": 4.9839572192513375e-06, | |
| "loss": 0.5007, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.5037433155080214, | |
| "grad_norm": 0.6195251941680908, | |
| "learning_rate": 4.973262032085562e-06, | |
| "loss": 0.5342, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.5048128342245989, | |
| "grad_norm": 0.5453325510025024, | |
| "learning_rate": 4.9625668449197864e-06, | |
| "loss": 0.4789, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.5058823529411764, | |
| "grad_norm": 0.6104117631912231, | |
| "learning_rate": 4.951871657754011e-06, | |
| "loss": 0.4497, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.506951871657754, | |
| "grad_norm": 0.611249566078186, | |
| "learning_rate": 4.941176470588236e-06, | |
| "loss": 0.3875, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.5080213903743316, | |
| "grad_norm": 0.5313235521316528, | |
| "learning_rate": 4.93048128342246e-06, | |
| "loss": 0.4711, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.509090909090909, | |
| "grad_norm": 0.6234943866729736, | |
| "learning_rate": 4.919786096256685e-06, | |
| "loss": 0.3389, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.5101604278074866, | |
| "grad_norm": 0.732172429561615, | |
| "learning_rate": 4.90909090909091e-06, | |
| "loss": 0.4938, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.5112299465240642, | |
| "grad_norm": 0.5638684630393982, | |
| "learning_rate": 4.898395721925134e-06, | |
| "loss": 0.4353, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.5122994652406417, | |
| "grad_norm": 0.6484448909759521, | |
| "learning_rate": 4.8877005347593585e-06, | |
| "loss": 0.4658, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.5133689839572193, | |
| "grad_norm": 6.943800926208496, | |
| "learning_rate": 4.877005347593583e-06, | |
| "loss": 0.4926, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.5144385026737968, | |
| "grad_norm": 0.5177822709083557, | |
| "learning_rate": 4.866310160427808e-06, | |
| "loss": 0.3964, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.5155080213903743, | |
| "grad_norm": 0.6676076650619507, | |
| "learning_rate": 4.855614973262032e-06, | |
| "loss": 0.3927, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.5165775401069519, | |
| "grad_norm": 0.6472482085227966, | |
| "learning_rate": 4.844919786096257e-06, | |
| "loss": 0.57, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.5176470588235295, | |
| "grad_norm": 0.6518506407737732, | |
| "learning_rate": 4.834224598930482e-06, | |
| "loss": 0.4084, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.5187165775401069, | |
| "grad_norm": 4.733760356903076, | |
| "learning_rate": 4.823529411764706e-06, | |
| "loss": 0.486, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.5197860962566845, | |
| "grad_norm": 0.6562100648880005, | |
| "learning_rate": 4.812834224598931e-06, | |
| "loss": 0.5115, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.520855614973262, | |
| "grad_norm": 0.6982669830322266, | |
| "learning_rate": 4.802139037433156e-06, | |
| "loss": 0.5227, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.5219251336898396, | |
| "grad_norm": 0.6307410001754761, | |
| "learning_rate": 4.7914438502673795e-06, | |
| "loss": 0.4233, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.5229946524064171, | |
| "grad_norm": 0.6658247113227844, | |
| "learning_rate": 4.780748663101605e-06, | |
| "loss": 0.582, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.5240641711229946, | |
| "grad_norm": 0.6113049387931824, | |
| "learning_rate": 4.770053475935829e-06, | |
| "loss": 0.6194, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.5251336898395722, | |
| "grad_norm": 0.6410903930664062, | |
| "learning_rate": 4.759358288770054e-06, | |
| "loss": 0.4335, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.5262032085561498, | |
| "grad_norm": 0.6378293633460999, | |
| "learning_rate": 4.748663101604278e-06, | |
| "loss": 0.4012, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.5272727272727272, | |
| "grad_norm": 0.6036868095397949, | |
| "learning_rate": 4.7379679144385035e-06, | |
| "loss": 0.4209, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.5283422459893048, | |
| "grad_norm": 1.0075055360794067, | |
| "learning_rate": 4.727272727272728e-06, | |
| "loss": 0.4729, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.5294117647058824, | |
| "grad_norm": 0.7693398594856262, | |
| "learning_rate": 4.7165775401069524e-06, | |
| "loss": 0.4365, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.5304812834224599, | |
| "grad_norm": 0.6089740991592407, | |
| "learning_rate": 4.705882352941177e-06, | |
| "loss": 0.451, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.5315508021390374, | |
| "grad_norm": 0.6565570831298828, | |
| "learning_rate": 4.695187165775401e-06, | |
| "loss": 0.4581, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.532620320855615, | |
| "grad_norm": 0.5187976956367493, | |
| "learning_rate": 4.684491978609626e-06, | |
| "loss": 0.3428, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.5336898395721925, | |
| "grad_norm": 0.5061827898025513, | |
| "learning_rate": 4.673796791443851e-06, | |
| "loss": 0.2782, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.5347593582887701, | |
| "grad_norm": 0.7005665302276611, | |
| "learning_rate": 4.663101604278076e-06, | |
| "loss": 0.623, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.5358288770053476, | |
| "grad_norm": 0.6551154255867004, | |
| "learning_rate": 4.6524064171123e-06, | |
| "loss": 0.4099, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.5368983957219251, | |
| "grad_norm": 0.5885181427001953, | |
| "learning_rate": 4.6417112299465245e-06, | |
| "loss": 0.4826, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.5379679144385027, | |
| "grad_norm": 0.6168848872184753, | |
| "learning_rate": 4.631016042780749e-06, | |
| "loss": 0.4298, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.5390374331550802, | |
| "grad_norm": 0.6331405639648438, | |
| "learning_rate": 4.6203208556149734e-06, | |
| "loss": 0.5538, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.5401069518716578, | |
| "grad_norm": 0.6529188752174377, | |
| "learning_rate": 4.609625668449198e-06, | |
| "loss": 0.4448, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.5411764705882353, | |
| "grad_norm": 0.6944399476051331, | |
| "learning_rate": 4.598930481283423e-06, | |
| "loss": 0.4571, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.5422459893048128, | |
| "grad_norm": 0.5985727906227112, | |
| "learning_rate": 4.588235294117647e-06, | |
| "loss": 0.4476, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.5433155080213904, | |
| "grad_norm": 0.7181333899497986, | |
| "learning_rate": 4.577540106951872e-06, | |
| "loss": 0.3491, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.544385026737968, | |
| "grad_norm": 0.6864769458770752, | |
| "learning_rate": 4.566844919786097e-06, | |
| "loss": 0.5396, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.5454545454545454, | |
| "grad_norm": 0.8590689897537231, | |
| "learning_rate": 4.556149732620321e-06, | |
| "loss": 0.5902, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.546524064171123, | |
| "grad_norm": 0.6965978145599365, | |
| "learning_rate": 4.5454545454545455e-06, | |
| "loss": 0.4242, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.5475935828877005, | |
| "grad_norm": 0.5799986124038696, | |
| "learning_rate": 4.534759358288771e-06, | |
| "loss": 0.3804, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.5486631016042781, | |
| "grad_norm": 0.6321996450424194, | |
| "learning_rate": 4.524064171122995e-06, | |
| "loss": 0.4651, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.5497326203208556, | |
| "grad_norm": 0.8161740303039551, | |
| "learning_rate": 4.51336898395722e-06, | |
| "loss": 0.5245, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.5508021390374331, | |
| "grad_norm": 0.7632600665092468, | |
| "learning_rate": 4.502673796791444e-06, | |
| "loss": 0.5805, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.5518716577540107, | |
| "grad_norm": 0.6970545649528503, | |
| "learning_rate": 4.491978609625669e-06, | |
| "loss": 0.473, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.5529411764705883, | |
| "grad_norm": 0.5965619683265686, | |
| "learning_rate": 4.481283422459893e-06, | |
| "loss": 0.4138, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.5540106951871657, | |
| "grad_norm": 0.6775331497192383, | |
| "learning_rate": 4.4705882352941184e-06, | |
| "loss": 0.3954, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.5550802139037433, | |
| "grad_norm": 0.669482409954071, | |
| "learning_rate": 4.459893048128343e-06, | |
| "loss": 0.4952, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.5561497326203209, | |
| "grad_norm": 0.6086430549621582, | |
| "learning_rate": 4.449197860962567e-06, | |
| "loss": 0.462, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.5572192513368984, | |
| "grad_norm": 0.6787458062171936, | |
| "learning_rate": 4.438502673796792e-06, | |
| "loss": 0.4218, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.558288770053476, | |
| "grad_norm": 0.6321885585784912, | |
| "learning_rate": 4.427807486631016e-06, | |
| "loss": 0.4159, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.5593582887700534, | |
| "grad_norm": 0.5498648881912231, | |
| "learning_rate": 4.417112299465241e-06, | |
| "loss": 0.5191, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.560427807486631, | |
| "grad_norm": 0.5822021961212158, | |
| "learning_rate": 4.406417112299465e-06, | |
| "loss": 0.4003, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.5614973262032086, | |
| "grad_norm": 0.9705522060394287, | |
| "learning_rate": 4.3957219251336905e-06, | |
| "loss": 0.5514, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.5625668449197861, | |
| "grad_norm": 0.8319284319877625, | |
| "learning_rate": 4.385026737967915e-06, | |
| "loss": 0.362, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.5636363636363636, | |
| "grad_norm": 0.621861457824707, | |
| "learning_rate": 4.3743315508021394e-06, | |
| "loss": 0.4771, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.5647058823529412, | |
| "grad_norm": 0.5976999402046204, | |
| "learning_rate": 4.363636363636364e-06, | |
| "loss": 0.3489, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.5657754010695187, | |
| "grad_norm": 0.6107754111289978, | |
| "learning_rate": 4.352941176470588e-06, | |
| "loss": 0.5298, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.5668449197860963, | |
| "grad_norm": 0.5526769161224365, | |
| "learning_rate": 4.342245989304813e-06, | |
| "loss": 0.4193, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.5679144385026738, | |
| "grad_norm": 0.6475231647491455, | |
| "learning_rate": 4.331550802139038e-06, | |
| "loss": 0.4781, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.5689839572192513, | |
| "grad_norm": 0.4877158999443054, | |
| "learning_rate": 4.320855614973263e-06, | |
| "loss": 0.3924, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.5700534759358289, | |
| "grad_norm": 0.8760375380516052, | |
| "learning_rate": 4.310160427807487e-06, | |
| "loss": 0.5446, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.5711229946524065, | |
| "grad_norm": 0.6752333045005798, | |
| "learning_rate": 4.2994652406417115e-06, | |
| "loss": 0.2875, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.5721925133689839, | |
| "grad_norm": 0.8983691334724426, | |
| "learning_rate": 4.288770053475936e-06, | |
| "loss": 0.4903, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.5732620320855615, | |
| "grad_norm": 0.7159671783447266, | |
| "learning_rate": 4.2780748663101604e-06, | |
| "loss": 0.4185, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.574331550802139, | |
| "grad_norm": 0.5499292612075806, | |
| "learning_rate": 4.267379679144386e-06, | |
| "loss": 0.3959, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.5754010695187166, | |
| "grad_norm": 0.6133045554161072, | |
| "learning_rate": 4.25668449197861e-06, | |
| "loss": 0.3701, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.5764705882352941, | |
| "grad_norm": 0.44626930356025696, | |
| "learning_rate": 4.245989304812835e-06, | |
| "loss": 0.5138, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.5775401069518716, | |
| "grad_norm": 0.7441984415054321, | |
| "learning_rate": 4.235294117647059e-06, | |
| "loss": 0.5185, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.5786096256684492, | |
| "grad_norm": 0.5134856700897217, | |
| "learning_rate": 4.224598930481284e-06, | |
| "loss": 0.5052, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.5796791443850268, | |
| "grad_norm": 0.5492193698883057, | |
| "learning_rate": 4.213903743315508e-06, | |
| "loss": 0.3639, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.5807486631016042, | |
| "grad_norm": 0.5567763447761536, | |
| "learning_rate": 4.203208556149733e-06, | |
| "loss": 0.501, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.5818181818181818, | |
| "grad_norm": 0.6094735264778137, | |
| "learning_rate": 4.192513368983958e-06, | |
| "loss": 0.3661, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.5828877005347594, | |
| "grad_norm": 0.6041865944862366, | |
| "learning_rate": 4.181818181818182e-06, | |
| "loss": 0.442, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.5839572192513369, | |
| "grad_norm": 0.68603515625, | |
| "learning_rate": 4.171122994652407e-06, | |
| "loss": 0.5083, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.5850267379679145, | |
| "grad_norm": 0.5632447004318237, | |
| "learning_rate": 4.160427807486631e-06, | |
| "loss": 0.4473, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.586096256684492, | |
| "grad_norm": 0.8183507323265076, | |
| "learning_rate": 4.149732620320856e-06, | |
| "loss": 0.3873, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.5871657754010695, | |
| "grad_norm": 0.6135860085487366, | |
| "learning_rate": 4.13903743315508e-06, | |
| "loss": 0.4273, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.5882352941176471, | |
| "grad_norm": 0.6782481670379639, | |
| "learning_rate": 4.1283422459893054e-06, | |
| "loss": 0.5008, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.5893048128342246, | |
| "grad_norm": 0.588299572467804, | |
| "learning_rate": 4.11764705882353e-06, | |
| "loss": 0.4437, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.5903743315508021, | |
| "grad_norm": 0.8137941360473633, | |
| "learning_rate": 4.106951871657754e-06, | |
| "loss": 0.5312, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.5914438502673797, | |
| "grad_norm": 0.5752719044685364, | |
| "learning_rate": 4.096256684491979e-06, | |
| "loss": 0.5033, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.5925133689839572, | |
| "grad_norm": 0.5931932330131531, | |
| "learning_rate": 4.085561497326203e-06, | |
| "loss": 0.3004, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.5935828877005348, | |
| "grad_norm": 0.8206865191459656, | |
| "learning_rate": 4.074866310160428e-06, | |
| "loss": 0.4438, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.5946524064171123, | |
| "grad_norm": 0.6491235494613647, | |
| "learning_rate": 4.064171122994653e-06, | |
| "loss": 0.4961, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.5957219251336898, | |
| "grad_norm": 0.6359995007514954, | |
| "learning_rate": 4.0534759358288775e-06, | |
| "loss": 0.464, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.5967914438502674, | |
| "grad_norm": 0.5435465574264526, | |
| "learning_rate": 4.042780748663102e-06, | |
| "loss": 0.5067, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.597860962566845, | |
| "grad_norm": 0.6942026615142822, | |
| "learning_rate": 4.0320855614973264e-06, | |
| "loss": 0.5298, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.5989304812834224, | |
| "grad_norm": 0.5083630084991455, | |
| "learning_rate": 4.021390374331552e-06, | |
| "loss": 0.4064, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.9618167877197266, | |
| "learning_rate": 4.010695187165775e-06, | |
| "loss": 0.4733, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.6010695187165775, | |
| "grad_norm": 0.7117323875427246, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 0.4127, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.6021390374331551, | |
| "grad_norm": 0.6754978895187378, | |
| "learning_rate": 3.989304812834225e-06, | |
| "loss": 0.5834, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.6032085561497326, | |
| "grad_norm": 0.9293348789215088, | |
| "learning_rate": 3.97860962566845e-06, | |
| "loss": 0.3781, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.6042780748663101, | |
| "grad_norm": 0.6356310248374939, | |
| "learning_rate": 3.967914438502674e-06, | |
| "loss": 0.4295, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.6053475935828877, | |
| "grad_norm": 0.9707384705543518, | |
| "learning_rate": 3.957219251336899e-06, | |
| "loss": 0.4946, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.6064171122994653, | |
| "grad_norm": 0.8069990873336792, | |
| "learning_rate": 3.946524064171123e-06, | |
| "loss": 0.4581, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.6074866310160428, | |
| "grad_norm": 0.5745541453361511, | |
| "learning_rate": 3.9358288770053474e-06, | |
| "loss": 0.4484, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.6085561497326203, | |
| "grad_norm": 0.6778978705406189, | |
| "learning_rate": 3.925133689839573e-06, | |
| "loss": 0.4063, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.6096256684491979, | |
| "grad_norm": 0.6003164649009705, | |
| "learning_rate": 3.914438502673797e-06, | |
| "loss": 0.4939, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.6106951871657754, | |
| "grad_norm": 0.567144513130188, | |
| "learning_rate": 3.903743315508022e-06, | |
| "loss": 0.5082, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.611764705882353, | |
| "grad_norm": 0.6961466670036316, | |
| "learning_rate": 3.893048128342246e-06, | |
| "loss": 0.4296, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.6128342245989304, | |
| "grad_norm": 0.7187231183052063, | |
| "learning_rate": 3.882352941176471e-06, | |
| "loss": 0.4872, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.613903743315508, | |
| "grad_norm": 0.7099078893661499, | |
| "learning_rate": 3.871657754010695e-06, | |
| "loss": 0.3891, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.6149732620320856, | |
| "grad_norm": 0.6975756287574768, | |
| "learning_rate": 3.86096256684492e-06, | |
| "loss": 0.4818, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.6160427807486631, | |
| "grad_norm": 0.7316322326660156, | |
| "learning_rate": 3.850267379679145e-06, | |
| "loss": 0.5543, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.6171122994652406, | |
| "grad_norm": 0.5383787751197815, | |
| "learning_rate": 3.839572192513369e-06, | |
| "loss": 0.3826, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.6181818181818182, | |
| "grad_norm": 0.7244964241981506, | |
| "learning_rate": 3.828877005347594e-06, | |
| "loss": 0.5376, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.6192513368983957, | |
| "grad_norm": 0.5270150303840637, | |
| "learning_rate": 3.818181818181819e-06, | |
| "loss": 0.4312, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.6203208556149733, | |
| "grad_norm": 0.6970075964927673, | |
| "learning_rate": 3.807486631016043e-06, | |
| "loss": 0.5089, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.6213903743315508, | |
| "grad_norm": 0.534832239151001, | |
| "learning_rate": 3.796791443850268e-06, | |
| "loss": 0.3596, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.6224598930481283, | |
| "grad_norm": 0.8076164722442627, | |
| "learning_rate": 3.786096256684492e-06, | |
| "loss": 0.4827, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.6235294117647059, | |
| "grad_norm": 0.5165025591850281, | |
| "learning_rate": 3.775401069518717e-06, | |
| "loss": 0.3259, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.6245989304812835, | |
| "grad_norm": 0.7118784189224243, | |
| "learning_rate": 3.7647058823529414e-06, | |
| "loss": 0.4214, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.6256684491978609, | |
| "grad_norm": 0.5382787585258484, | |
| "learning_rate": 3.7540106951871662e-06, | |
| "loss": 0.3696, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.6267379679144385, | |
| "grad_norm": 0.5972786545753479, | |
| "learning_rate": 3.7433155080213907e-06, | |
| "loss": 0.3993, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.627807486631016, | |
| "grad_norm": 0.6584972143173218, | |
| "learning_rate": 3.7326203208556156e-06, | |
| "loss": 0.5942, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.6288770053475936, | |
| "grad_norm": 0.7586032152175903, | |
| "learning_rate": 3.72192513368984e-06, | |
| "loss": 0.5101, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.6299465240641712, | |
| "grad_norm": 0.5341634750366211, | |
| "learning_rate": 3.711229946524064e-06, | |
| "loss": 0.4506, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.6310160427807486, | |
| "grad_norm": 0.513986349105835, | |
| "learning_rate": 3.700534759358289e-06, | |
| "loss": 0.3564, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.6320855614973262, | |
| "grad_norm": 0.5170319676399231, | |
| "learning_rate": 3.6898395721925134e-06, | |
| "loss": 0.399, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.6331550802139038, | |
| "grad_norm": 0.49844464659690857, | |
| "learning_rate": 3.6791443850267383e-06, | |
| "loss": 0.3651, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.6342245989304813, | |
| "grad_norm": 0.7075485587120056, | |
| "learning_rate": 3.6684491978609628e-06, | |
| "loss": 0.4524, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.6352941176470588, | |
| "grad_norm": 0.53248530626297, | |
| "learning_rate": 3.6577540106951877e-06, | |
| "loss": 0.4226, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.6363636363636364, | |
| "grad_norm": 0.6508234739303589, | |
| "learning_rate": 3.6470588235294117e-06, | |
| "loss": 0.4839, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.6374331550802139, | |
| "grad_norm": 0.5128681063652039, | |
| "learning_rate": 3.6363636363636366e-06, | |
| "loss": 0.4801, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.6385026737967915, | |
| "grad_norm": 0.6493229866027832, | |
| "learning_rate": 3.625668449197861e-06, | |
| "loss": 0.4779, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.6395721925133689, | |
| "grad_norm": 0.6966223120689392, | |
| "learning_rate": 3.614973262032086e-06, | |
| "loss": 0.5038, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.6406417112299465, | |
| "grad_norm": 0.5874669551849365, | |
| "learning_rate": 3.6042780748663104e-06, | |
| "loss": 0.4847, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.6417112299465241, | |
| "grad_norm": 0.6122252345085144, | |
| "learning_rate": 3.5935828877005353e-06, | |
| "loss": 0.446, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.6427807486631016, | |
| "grad_norm": 0.5029746294021606, | |
| "learning_rate": 3.5828877005347597e-06, | |
| "loss": 0.4329, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.6438502673796791, | |
| "grad_norm": 0.7518463730812073, | |
| "learning_rate": 3.5721925133689846e-06, | |
| "loss": 0.5375, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.6449197860962567, | |
| "grad_norm": 0.6462073922157288, | |
| "learning_rate": 3.5614973262032087e-06, | |
| "loss": 0.4941, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.6459893048128342, | |
| "grad_norm": 0.4970465302467346, | |
| "learning_rate": 3.5508021390374335e-06, | |
| "loss": 0.4039, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.6470588235294118, | |
| "grad_norm": 0.6401532888412476, | |
| "learning_rate": 3.540106951871658e-06, | |
| "loss": 0.4924, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.6481283422459893, | |
| "grad_norm": 0.5659219622612, | |
| "learning_rate": 3.529411764705883e-06, | |
| "loss": 0.4233, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.6491978609625668, | |
| "grad_norm": 0.5074617862701416, | |
| "learning_rate": 3.5187165775401074e-06, | |
| "loss": 0.4763, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.6502673796791444, | |
| "grad_norm": 0.8209864497184753, | |
| "learning_rate": 3.5080213903743322e-06, | |
| "loss": 0.4918, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.651336898395722, | |
| "grad_norm": 0.6214138865470886, | |
| "learning_rate": 3.4973262032085563e-06, | |
| "loss": 0.4164, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.6524064171122995, | |
| "grad_norm": 0.6655123829841614, | |
| "learning_rate": 3.4866310160427807e-06, | |
| "loss": 0.4937, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.653475935828877, | |
| "grad_norm": 0.5634870529174805, | |
| "learning_rate": 3.4759358288770056e-06, | |
| "loss": 0.3735, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.6545454545454545, | |
| "grad_norm": 0.544310450553894, | |
| "learning_rate": 3.46524064171123e-06, | |
| "loss": 0.3605, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.6556149732620321, | |
| "grad_norm": 0.6025211215019226, | |
| "learning_rate": 3.454545454545455e-06, | |
| "loss": 0.5662, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.6566844919786097, | |
| "grad_norm": 0.8766098618507385, | |
| "learning_rate": 3.443850267379679e-06, | |
| "loss": 0.4772, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.6577540106951871, | |
| "grad_norm": 0.6058558821678162, | |
| "learning_rate": 3.433155080213904e-06, | |
| "loss": 0.4949, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.6588235294117647, | |
| "grad_norm": 0.6009690165519714, | |
| "learning_rate": 3.4224598930481284e-06, | |
| "loss": 0.4107, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.6598930481283423, | |
| "grad_norm": 0.6540274024009705, | |
| "learning_rate": 3.4117647058823532e-06, | |
| "loss": 0.421, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.6609625668449198, | |
| "grad_norm": 0.504133939743042, | |
| "learning_rate": 3.4010695187165777e-06, | |
| "loss": 0.4268, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.6620320855614973, | |
| "grad_norm": 0.5908675193786621, | |
| "learning_rate": 3.3903743315508026e-06, | |
| "loss": 0.4188, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.6631016042780749, | |
| "grad_norm": 0.4493868052959442, | |
| "learning_rate": 3.379679144385027e-06, | |
| "loss": 0.3575, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.6641711229946524, | |
| "grad_norm": 0.5610851049423218, | |
| "learning_rate": 3.368983957219252e-06, | |
| "loss": 0.3991, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.66524064171123, | |
| "grad_norm": 0.5496257543563843, | |
| "learning_rate": 3.358288770053476e-06, | |
| "loss": 0.4461, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.6663101604278074, | |
| "grad_norm": 0.7794954180717468, | |
| "learning_rate": 3.347593582887701e-06, | |
| "loss": 0.4539, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.667379679144385, | |
| "grad_norm": 0.8371284008026123, | |
| "learning_rate": 3.3368983957219253e-06, | |
| "loss": 0.4695, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.6684491978609626, | |
| "grad_norm": 0.6670841574668884, | |
| "learning_rate": 3.32620320855615e-06, | |
| "loss": 0.5121, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.6695187165775401, | |
| "grad_norm": 0.6398891806602478, | |
| "learning_rate": 3.3155080213903747e-06, | |
| "loss": 0.4468, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.6705882352941176, | |
| "grad_norm": 0.5581755042076111, | |
| "learning_rate": 3.3048128342245995e-06, | |
| "loss": 0.4537, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.6716577540106952, | |
| "grad_norm": 0.6236687302589417, | |
| "learning_rate": 3.2941176470588236e-06, | |
| "loss": 0.4326, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.6727272727272727, | |
| "grad_norm": 0.5394688844680786, | |
| "learning_rate": 3.2834224598930485e-06, | |
| "loss": 0.4004, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.6737967914438503, | |
| "grad_norm": 0.4413270354270935, | |
| "learning_rate": 3.272727272727273e-06, | |
| "loss": 0.3789, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.6748663101604279, | |
| "grad_norm": 0.70003342628479, | |
| "learning_rate": 3.262032085561498e-06, | |
| "loss": 0.4753, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.6759358288770053, | |
| "grad_norm": 1.1233257055282593, | |
| "learning_rate": 3.2513368983957223e-06, | |
| "loss": 0.4321, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.6770053475935829, | |
| "grad_norm": 0.638300359249115, | |
| "learning_rate": 3.2406417112299467e-06, | |
| "loss": 0.5107, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.6780748663101605, | |
| "grad_norm": 0.7122930288314819, | |
| "learning_rate": 3.2299465240641716e-06, | |
| "loss": 0.5018, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.679144385026738, | |
| "grad_norm": 0.5772097110748291, | |
| "learning_rate": 3.2192513368983957e-06, | |
| "loss": 0.4046, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.6802139037433155, | |
| "grad_norm": 0.8683834075927734, | |
| "learning_rate": 3.2085561497326205e-06, | |
| "loss": 0.4708, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.681283422459893, | |
| "grad_norm": 0.608211100101471, | |
| "learning_rate": 3.197860962566845e-06, | |
| "loss": 0.4713, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.6823529411764706, | |
| "grad_norm": 0.5458659529685974, | |
| "learning_rate": 3.18716577540107e-06, | |
| "loss": 0.3805, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.6834224598930482, | |
| "grad_norm": 0.5004284977912903, | |
| "learning_rate": 3.1764705882352943e-06, | |
| "loss": 0.4711, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.6844919786096256, | |
| "grad_norm": 0.5213730335235596, | |
| "learning_rate": 3.1657754010695192e-06, | |
| "loss": 0.4014, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.6855614973262032, | |
| "grad_norm": 0.5991172194480896, | |
| "learning_rate": 3.1550802139037433e-06, | |
| "loss": 0.4417, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.6866310160427808, | |
| "grad_norm": 0.6257739663124084, | |
| "learning_rate": 3.144385026737968e-06, | |
| "loss": 0.3944, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.6877005347593583, | |
| "grad_norm": 0.5935152173042297, | |
| "learning_rate": 3.1336898395721926e-06, | |
| "loss": 0.449, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.6887700534759358, | |
| "grad_norm": 0.5543785095214844, | |
| "learning_rate": 3.1229946524064175e-06, | |
| "loss": 0.4097, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.6898395721925134, | |
| "grad_norm": 0.8114000558853149, | |
| "learning_rate": 3.112299465240642e-06, | |
| "loss": 0.4094, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.6909090909090909, | |
| "grad_norm": 1.1168849468231201, | |
| "learning_rate": 3.101604278074867e-06, | |
| "loss": 0.6077, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.6919786096256685, | |
| "grad_norm": 0.6375116109848022, | |
| "learning_rate": 3.090909090909091e-06, | |
| "loss": 0.6394, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.6930481283422459, | |
| "grad_norm": 0.6143141984939575, | |
| "learning_rate": 3.0802139037433158e-06, | |
| "loss": 0.4093, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.6941176470588235, | |
| "grad_norm": 0.5934880375862122, | |
| "learning_rate": 3.0695187165775402e-06, | |
| "loss": 0.4275, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.6951871657754011, | |
| "grad_norm": 1.2362698316574097, | |
| "learning_rate": 3.058823529411765e-06, | |
| "loss": 0.4986, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.6962566844919786, | |
| "grad_norm": 0.5234349370002747, | |
| "learning_rate": 3.0481283422459896e-06, | |
| "loss": 0.376, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.6973262032085561, | |
| "grad_norm": 0.5769844651222229, | |
| "learning_rate": 3.0374331550802145e-06, | |
| "loss": 0.4575, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.6983957219251337, | |
| "grad_norm": 0.5550015568733215, | |
| "learning_rate": 3.026737967914439e-06, | |
| "loss": 0.4277, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.6994652406417112, | |
| "grad_norm": 0.666703999042511, | |
| "learning_rate": 3.016042780748663e-06, | |
| "loss": 0.5465, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.7005347593582888, | |
| "grad_norm": 0.5821000933647156, | |
| "learning_rate": 3.005347593582888e-06, | |
| "loss": 0.4779, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.7016042780748664, | |
| "grad_norm": 0.6663423180580139, | |
| "learning_rate": 2.9946524064171123e-06, | |
| "loss": 0.5036, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.7026737967914438, | |
| "grad_norm": 0.5093011260032654, | |
| "learning_rate": 2.983957219251337e-06, | |
| "loss": 0.3797, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.7037433155080214, | |
| "grad_norm": 0.5428939461708069, | |
| "learning_rate": 2.9732620320855617e-06, | |
| "loss": 0.4766, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.704812834224599, | |
| "grad_norm": 0.627892255783081, | |
| "learning_rate": 2.9625668449197865e-06, | |
| "loss": 0.4506, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.7058823529411765, | |
| "grad_norm": 0.9262791275978088, | |
| "learning_rate": 2.9518716577540106e-06, | |
| "loss": 0.5165, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.706951871657754, | |
| "grad_norm": 0.6016249656677246, | |
| "learning_rate": 2.9411764705882355e-06, | |
| "loss": 0.4681, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.7080213903743315, | |
| "grad_norm": 0.5292555689811707, | |
| "learning_rate": 2.93048128342246e-06, | |
| "loss": 0.417, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.7090909090909091, | |
| "grad_norm": 0.5415362119674683, | |
| "learning_rate": 2.919786096256685e-06, | |
| "loss": 0.4329, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.7101604278074867, | |
| "grad_norm": 0.5623718500137329, | |
| "learning_rate": 2.9090909090909093e-06, | |
| "loss": 0.4849, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.7112299465240641, | |
| "grad_norm": 0.6125910878181458, | |
| "learning_rate": 2.898395721925134e-06, | |
| "loss": 0.4835, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.7122994652406417, | |
| "grad_norm": 0.6070640683174133, | |
| "learning_rate": 2.8877005347593586e-06, | |
| "loss": 0.3176, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.7133689839572193, | |
| "grad_norm": 0.5655325651168823, | |
| "learning_rate": 2.8770053475935835e-06, | |
| "loss": 0.4778, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.7144385026737968, | |
| "grad_norm": 0.5334764719009399, | |
| "learning_rate": 2.8663101604278075e-06, | |
| "loss": 0.5113, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.7155080213903743, | |
| "grad_norm": 0.6683606505393982, | |
| "learning_rate": 2.8556149732620324e-06, | |
| "loss": 0.5018, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.7165775401069518, | |
| "grad_norm": 0.5358517169952393, | |
| "learning_rate": 2.844919786096257e-06, | |
| "loss": 0.558, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.7176470588235294, | |
| "grad_norm": 0.45965051651000977, | |
| "learning_rate": 2.8342245989304818e-06, | |
| "loss": 0.4543, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.718716577540107, | |
| "grad_norm": 0.673067569732666, | |
| "learning_rate": 2.8235294117647062e-06, | |
| "loss": 0.4577, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.7197860962566844, | |
| "grad_norm": 0.5588769316673279, | |
| "learning_rate": 2.812834224598931e-06, | |
| "loss": 0.4897, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.720855614973262, | |
| "grad_norm": 0.7788881659507751, | |
| "learning_rate": 2.802139037433155e-06, | |
| "loss": 0.461, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.7219251336898396, | |
| "grad_norm": 0.6181241273880005, | |
| "learning_rate": 2.79144385026738e-06, | |
| "loss": 0.457, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.7229946524064171, | |
| "grad_norm": 0.6370013356208801, | |
| "learning_rate": 2.7807486631016045e-06, | |
| "loss": 0.5507, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.7240641711229947, | |
| "grad_norm": 0.6255584955215454, | |
| "learning_rate": 2.770053475935829e-06, | |
| "loss": 0.5962, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.7251336898395722, | |
| "grad_norm": 0.6348366141319275, | |
| "learning_rate": 2.759358288770054e-06, | |
| "loss": 0.5012, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.7262032085561497, | |
| "grad_norm": 0.7242496013641357, | |
| "learning_rate": 2.748663101604278e-06, | |
| "loss": 0.5706, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.7272727272727273, | |
| "grad_norm": 0.5692611932754517, | |
| "learning_rate": 2.7379679144385028e-06, | |
| "loss": 0.5073, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.7283422459893049, | |
| "grad_norm": 0.6152932047843933, | |
| "learning_rate": 2.7272727272727272e-06, | |
| "loss": 0.5284, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.7294117647058823, | |
| "grad_norm": 0.570809543132782, | |
| "learning_rate": 2.716577540106952e-06, | |
| "loss": 0.4172, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.7304812834224599, | |
| "grad_norm": 0.6525633931159973, | |
| "learning_rate": 2.7058823529411766e-06, | |
| "loss": 0.4072, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.7315508021390374, | |
| "grad_norm": 0.6481879353523254, | |
| "learning_rate": 2.6951871657754015e-06, | |
| "loss": 0.3715, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.732620320855615, | |
| "grad_norm": 0.554436445236206, | |
| "learning_rate": 2.684491978609626e-06, | |
| "loss": 0.4586, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.7336898395721925, | |
| "grad_norm": 0.4814223051071167, | |
| "learning_rate": 2.673796791443851e-06, | |
| "loss": 0.3511, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.73475935828877, | |
| "grad_norm": 0.6819257736206055, | |
| "learning_rate": 2.663101604278075e-06, | |
| "loss": 0.5651, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.7358288770053476, | |
| "grad_norm": 1.4043816328048706, | |
| "learning_rate": 2.6524064171122997e-06, | |
| "loss": 0.342, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.7368983957219252, | |
| "grad_norm": 0.4884723424911499, | |
| "learning_rate": 2.641711229946524e-06, | |
| "loss": 0.4258, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.7379679144385026, | |
| "grad_norm": 0.6273646950721741, | |
| "learning_rate": 2.631016042780749e-06, | |
| "loss": 0.4014, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.7390374331550802, | |
| "grad_norm": 0.6172796487808228, | |
| "learning_rate": 2.6203208556149735e-06, | |
| "loss": 0.5249, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.7401069518716578, | |
| "grad_norm": 0.7721535563468933, | |
| "learning_rate": 2.6096256684491984e-06, | |
| "loss": 0.5513, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.7411764705882353, | |
| "grad_norm": 0.8145367503166199, | |
| "learning_rate": 2.5989304812834225e-06, | |
| "loss": 0.4547, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.7422459893048128, | |
| "grad_norm": 0.6289914846420288, | |
| "learning_rate": 2.5882352941176473e-06, | |
| "loss": 0.4626, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.7433155080213903, | |
| "grad_norm": 0.6151965856552124, | |
| "learning_rate": 2.577540106951872e-06, | |
| "loss": 0.4118, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.7443850267379679, | |
| "grad_norm": 1.2651584148406982, | |
| "learning_rate": 2.5668449197860967e-06, | |
| "loss": 0.4684, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.7454545454545455, | |
| "grad_norm": 0.555983304977417, | |
| "learning_rate": 2.556149732620321e-06, | |
| "loss": 0.4346, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.746524064171123, | |
| "grad_norm": 0.6285813450813293, | |
| "learning_rate": 2.5454545454545456e-06, | |
| "loss": 0.3806, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.7475935828877005, | |
| "grad_norm": 0.524695098400116, | |
| "learning_rate": 2.5347593582887705e-06, | |
| "loss": 0.4698, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.7486631016042781, | |
| "grad_norm": 0.5982396602630615, | |
| "learning_rate": 2.5240641711229945e-06, | |
| "loss": 0.4765, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.7497326203208556, | |
| "grad_norm": 0.675052285194397, | |
| "learning_rate": 2.5133689839572194e-06, | |
| "loss": 0.5129, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.7508021390374332, | |
| "grad_norm": 0.5992892384529114, | |
| "learning_rate": 2.502673796791444e-06, | |
| "loss": 0.5065, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.7518716577540107, | |
| "grad_norm": 1.234007477760315, | |
| "learning_rate": 2.4919786096256688e-06, | |
| "loss": 0.5474, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.7529411764705882, | |
| "grad_norm": 0.5893493294715881, | |
| "learning_rate": 2.4812834224598932e-06, | |
| "loss": 0.5441, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.7540106951871658, | |
| "grad_norm": 0.5642089247703552, | |
| "learning_rate": 2.470588235294118e-06, | |
| "loss": 0.5079, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.7550802139037434, | |
| "grad_norm": 0.40583565831184387, | |
| "learning_rate": 2.4598930481283426e-06, | |
| "loss": 0.3579, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.7561497326203208, | |
| "grad_norm": 1.2628885507583618, | |
| "learning_rate": 2.449197860962567e-06, | |
| "loss": 0.5262, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.7572192513368984, | |
| "grad_norm": 0.631543755531311, | |
| "learning_rate": 2.4385026737967915e-06, | |
| "loss": 0.4551, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.758288770053476, | |
| "grad_norm": 0.66451495885849, | |
| "learning_rate": 2.427807486631016e-06, | |
| "loss": 0.4782, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.7593582887700535, | |
| "grad_norm": 0.5754415392875671, | |
| "learning_rate": 2.417112299465241e-06, | |
| "loss": 0.3357, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.760427807486631, | |
| "grad_norm": 0.5933724641799927, | |
| "learning_rate": 2.4064171122994653e-06, | |
| "loss": 0.4003, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.7614973262032085, | |
| "grad_norm": 0.6731705665588379, | |
| "learning_rate": 2.3957219251336898e-06, | |
| "loss": 0.5086, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.7625668449197861, | |
| "grad_norm": 0.5990017056465149, | |
| "learning_rate": 2.3850267379679146e-06, | |
| "loss": 0.2902, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.7636363636363637, | |
| "grad_norm": 0.48504629731178284, | |
| "learning_rate": 2.374331550802139e-06, | |
| "loss": 0.3837, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.7647058823529411, | |
| "grad_norm": 0.5568614602088928, | |
| "learning_rate": 2.363636363636364e-06, | |
| "loss": 0.3601, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.7657754010695187, | |
| "grad_norm": 0.5120856165885925, | |
| "learning_rate": 2.3529411764705885e-06, | |
| "loss": 0.4955, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.7668449197860963, | |
| "grad_norm": 0.6382061839103699, | |
| "learning_rate": 2.342245989304813e-06, | |
| "loss": 0.5281, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.7679144385026738, | |
| "grad_norm": 0.5558910965919495, | |
| "learning_rate": 2.331550802139038e-06, | |
| "loss": 0.5116, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.7689839572192514, | |
| "grad_norm": 0.7900490164756775, | |
| "learning_rate": 2.3208556149732623e-06, | |
| "loss": 0.4145, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.7700534759358288, | |
| "grad_norm": 0.5656290054321289, | |
| "learning_rate": 2.3101604278074867e-06, | |
| "loss": 0.4718, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.7711229946524064, | |
| "grad_norm": 0.7339908480644226, | |
| "learning_rate": 2.2994652406417116e-06, | |
| "loss": 0.5291, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.772192513368984, | |
| "grad_norm": 0.41121137142181396, | |
| "learning_rate": 2.288770053475936e-06, | |
| "loss": 0.4186, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.7732620320855615, | |
| "grad_norm": 0.607227623462677, | |
| "learning_rate": 2.2780748663101605e-06, | |
| "loss": 0.4381, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.774331550802139, | |
| "grad_norm": 0.47620826959609985, | |
| "learning_rate": 2.2673796791443854e-06, | |
| "loss": 0.3108, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.7754010695187166, | |
| "grad_norm": 0.41767314076423645, | |
| "learning_rate": 2.25668449197861e-06, | |
| "loss": 0.3317, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.7764705882352941, | |
| "grad_norm": 0.5056875348091125, | |
| "learning_rate": 2.2459893048128343e-06, | |
| "loss": 0.4633, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.7775401069518717, | |
| "grad_norm": 0.5436606407165527, | |
| "learning_rate": 2.2352941176470592e-06, | |
| "loss": 0.5444, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.7786096256684492, | |
| "grad_norm": 0.5530479550361633, | |
| "learning_rate": 2.2245989304812837e-06, | |
| "loss": 0.4254, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.7796791443850267, | |
| "grad_norm": 0.6671726107597351, | |
| "learning_rate": 2.213903743315508e-06, | |
| "loss": 0.5859, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.7807486631016043, | |
| "grad_norm": 0.6353893280029297, | |
| "learning_rate": 2.2032085561497326e-06, | |
| "loss": 0.3919, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.7818181818181819, | |
| "grad_norm": 0.5810169577598572, | |
| "learning_rate": 2.1925133689839575e-06, | |
| "loss": 0.4395, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.7828877005347593, | |
| "grad_norm": 0.5706460475921631, | |
| "learning_rate": 2.181818181818182e-06, | |
| "loss": 0.3874, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.7839572192513369, | |
| "grad_norm": 0.6459718942642212, | |
| "learning_rate": 2.1711229946524064e-06, | |
| "loss": 0.5013, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.7850267379679144, | |
| "grad_norm": 0.6937084197998047, | |
| "learning_rate": 2.1604278074866313e-06, | |
| "loss": 0.5787, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.786096256684492, | |
| "grad_norm": 0.5482848882675171, | |
| "learning_rate": 2.1497326203208558e-06, | |
| "loss": 0.5023, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.7871657754010695, | |
| "grad_norm": 0.476616233587265, | |
| "learning_rate": 2.1390374331550802e-06, | |
| "loss": 0.2771, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.788235294117647, | |
| "grad_norm": 0.8386345505714417, | |
| "learning_rate": 2.128342245989305e-06, | |
| "loss": 0.4683, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.7893048128342246, | |
| "grad_norm": 0.5627638697624207, | |
| "learning_rate": 2.1176470588235296e-06, | |
| "loss": 0.4312, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.7903743315508022, | |
| "grad_norm": 0.6266486048698425, | |
| "learning_rate": 2.106951871657754e-06, | |
| "loss": 0.3833, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.7914438502673797, | |
| "grad_norm": 0.5341743230819702, | |
| "learning_rate": 2.096256684491979e-06, | |
| "loss": 0.4757, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.7925133689839572, | |
| "grad_norm": 0.7793628573417664, | |
| "learning_rate": 2.0855614973262034e-06, | |
| "loss": 0.4814, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.7935828877005348, | |
| "grad_norm": 0.4832534193992615, | |
| "learning_rate": 2.074866310160428e-06, | |
| "loss": 0.3972, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.7946524064171123, | |
| "grad_norm": 0.7946810126304626, | |
| "learning_rate": 2.0641711229946527e-06, | |
| "loss": 0.4231, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.7957219251336899, | |
| "grad_norm": 0.6672943234443665, | |
| "learning_rate": 2.053475935828877e-06, | |
| "loss": 0.3575, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.7967914438502673, | |
| "grad_norm": 0.5300989151000977, | |
| "learning_rate": 2.0427807486631016e-06, | |
| "loss": 0.4596, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.7978609625668449, | |
| "grad_norm": 0.656453013420105, | |
| "learning_rate": 2.0320855614973265e-06, | |
| "loss": 0.4804, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.7989304812834225, | |
| "grad_norm": 0.5788617730140686, | |
| "learning_rate": 2.021390374331551e-06, | |
| "loss": 0.4265, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.6547515988349915, | |
| "learning_rate": 2.010695187165776e-06, | |
| "loss": 0.3235, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.8010695187165775, | |
| "grad_norm": 0.6302303075790405, | |
| "learning_rate": 2.0000000000000003e-06, | |
| "loss": 0.5115, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.8021390374331551, | |
| "grad_norm": 0.5152655243873596, | |
| "learning_rate": 1.989304812834225e-06, | |
| "loss": 0.4885, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.8032085561497326, | |
| "grad_norm": 0.5326620936393738, | |
| "learning_rate": 1.9786096256684497e-06, | |
| "loss": 0.3846, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.8042780748663102, | |
| "grad_norm": 1.2811181545257568, | |
| "learning_rate": 1.9679144385026737e-06, | |
| "loss": 0.4065, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.8053475935828877, | |
| "grad_norm": 0.5953382849693298, | |
| "learning_rate": 1.9572192513368986e-06, | |
| "loss": 0.3441, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.8064171122994652, | |
| "grad_norm": 0.7036495804786682, | |
| "learning_rate": 1.946524064171123e-06, | |
| "loss": 0.467, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.8074866310160428, | |
| "grad_norm": 0.5551076531410217, | |
| "learning_rate": 1.9358288770053475e-06, | |
| "loss": 0.4128, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.8085561497326204, | |
| "grad_norm": 0.7753043174743652, | |
| "learning_rate": 1.9251336898395724e-06, | |
| "loss": 0.5599, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.8096256684491978, | |
| "grad_norm": 0.6135478615760803, | |
| "learning_rate": 1.914438502673797e-06, | |
| "loss": 0.4829, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.8106951871657754, | |
| "grad_norm": 0.49545517563819885, | |
| "learning_rate": 1.9037433155080215e-06, | |
| "loss": 0.4463, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.8117647058823529, | |
| "grad_norm": 0.6356120705604553, | |
| "learning_rate": 1.893048128342246e-06, | |
| "loss": 0.4677, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.8128342245989305, | |
| "grad_norm": 0.6208240389823914, | |
| "learning_rate": 1.8823529411764707e-06, | |
| "loss": 0.5298, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.813903743315508, | |
| "grad_norm": 0.5536626577377319, | |
| "learning_rate": 1.8716577540106954e-06, | |
| "loss": 0.4692, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.8149732620320855, | |
| "grad_norm": 1.6128660440444946, | |
| "learning_rate": 1.86096256684492e-06, | |
| "loss": 0.429, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.8160427807486631, | |
| "grad_norm": 0.5924348831176758, | |
| "learning_rate": 1.8502673796791445e-06, | |
| "loss": 0.5255, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.8171122994652407, | |
| "grad_norm": 0.6116847991943359, | |
| "learning_rate": 1.8395721925133692e-06, | |
| "loss": 0.5479, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.8181818181818182, | |
| "grad_norm": 0.575383722782135, | |
| "learning_rate": 1.8288770053475938e-06, | |
| "loss": 0.5394, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.8192513368983957, | |
| "grad_norm": 0.6104905605316162, | |
| "learning_rate": 1.8181818181818183e-06, | |
| "loss": 0.4413, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.8203208556149733, | |
| "grad_norm": 0.570062518119812, | |
| "learning_rate": 1.807486631016043e-06, | |
| "loss": 0.3342, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.8213903743315508, | |
| "grad_norm": 0.7175807356834412, | |
| "learning_rate": 1.7967914438502676e-06, | |
| "loss": 0.3569, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.8224598930481284, | |
| "grad_norm": 0.4351617991924286, | |
| "learning_rate": 1.7860962566844923e-06, | |
| "loss": 0.3941, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.8235294117647058, | |
| "grad_norm": 0.5576593279838562, | |
| "learning_rate": 1.7754010695187168e-06, | |
| "loss": 0.3717, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.8245989304812834, | |
| "grad_norm": 0.8095917105674744, | |
| "learning_rate": 1.7647058823529414e-06, | |
| "loss": 0.4179, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.825668449197861, | |
| "grad_norm": 0.8092955946922302, | |
| "learning_rate": 1.7540106951871661e-06, | |
| "loss": 0.4674, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.8267379679144385, | |
| "grad_norm": 0.7119324803352356, | |
| "learning_rate": 1.7433155080213904e-06, | |
| "loss": 0.5301, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.827807486631016, | |
| "grad_norm": 0.528871476650238, | |
| "learning_rate": 1.732620320855615e-06, | |
| "loss": 0.3606, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.8288770053475936, | |
| "grad_norm": 0.602125346660614, | |
| "learning_rate": 1.7219251336898395e-06, | |
| "loss": 0.5071, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.8299465240641711, | |
| "grad_norm": 0.6447128653526306, | |
| "learning_rate": 1.7112299465240642e-06, | |
| "loss": 0.4465, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.8310160427807487, | |
| "grad_norm": 0.6567676663398743, | |
| "learning_rate": 1.7005347593582888e-06, | |
| "loss": 0.3857, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.8320855614973262, | |
| "grad_norm": 0.5612752437591553, | |
| "learning_rate": 1.6898395721925135e-06, | |
| "loss": 0.483, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.8331550802139037, | |
| "grad_norm": 0.5748909711837769, | |
| "learning_rate": 1.679144385026738e-06, | |
| "loss": 0.4623, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.8342245989304813, | |
| "grad_norm": 0.5179409384727478, | |
| "learning_rate": 1.6684491978609627e-06, | |
| "loss": 0.4234, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.8352941176470589, | |
| "grad_norm": 1.5632623434066772, | |
| "learning_rate": 1.6577540106951873e-06, | |
| "loss": 0.3918, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.8363636363636363, | |
| "grad_norm": 0.6550920605659485, | |
| "learning_rate": 1.6470588235294118e-06, | |
| "loss": 0.4887, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.8374331550802139, | |
| "grad_norm": 0.5957558155059814, | |
| "learning_rate": 1.6363636363636365e-06, | |
| "loss": 0.4395, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.8385026737967914, | |
| "grad_norm": 0.6081556677818298, | |
| "learning_rate": 1.6256684491978611e-06, | |
| "loss": 0.392, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.839572192513369, | |
| "grad_norm": 0.5359925031661987, | |
| "learning_rate": 1.6149732620320858e-06, | |
| "loss": 0.4054, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.8406417112299466, | |
| "grad_norm": 0.5983924865722656, | |
| "learning_rate": 1.6042780748663103e-06, | |
| "loss": 0.4888, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.841711229946524, | |
| "grad_norm": 0.8540737628936768, | |
| "learning_rate": 1.593582887700535e-06, | |
| "loss": 0.4941, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.8427807486631016, | |
| "grad_norm": 0.6049513220787048, | |
| "learning_rate": 1.5828877005347596e-06, | |
| "loss": 0.4474, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.8438502673796792, | |
| "grad_norm": 0.6652930378913879, | |
| "learning_rate": 1.572192513368984e-06, | |
| "loss": 0.406, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.8449197860962567, | |
| "grad_norm": 0.7171810269355774, | |
| "learning_rate": 1.5614973262032088e-06, | |
| "loss": 0.4592, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.8459893048128342, | |
| "grad_norm": 0.6037563681602478, | |
| "learning_rate": 1.5508021390374334e-06, | |
| "loss": 0.4536, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.8470588235294118, | |
| "grad_norm": 0.5136492848396301, | |
| "learning_rate": 1.5401069518716579e-06, | |
| "loss": 0.4819, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.8481283422459893, | |
| "grad_norm": 0.640941858291626, | |
| "learning_rate": 1.5294117647058826e-06, | |
| "loss": 0.415, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.8491978609625669, | |
| "grad_norm": 0.9364274144172668, | |
| "learning_rate": 1.5187165775401072e-06, | |
| "loss": 0.483, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.8502673796791443, | |
| "grad_norm": 0.565367579460144, | |
| "learning_rate": 1.5080213903743315e-06, | |
| "loss": 0.3394, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.8513368983957219, | |
| "grad_norm": 0.5467458963394165, | |
| "learning_rate": 1.4973262032085562e-06, | |
| "loss": 0.59, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.8524064171122995, | |
| "grad_norm": 0.638268232345581, | |
| "learning_rate": 1.4866310160427808e-06, | |
| "loss": 0.3641, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.853475935828877, | |
| "grad_norm": 0.5014296174049377, | |
| "learning_rate": 1.4759358288770053e-06, | |
| "loss": 0.379, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.8545454545454545, | |
| "grad_norm": 0.687904417514801, | |
| "learning_rate": 1.46524064171123e-06, | |
| "loss": 0.406, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.8556149732620321, | |
| "grad_norm": 0.5527435541152954, | |
| "learning_rate": 1.4545454545454546e-06, | |
| "loss": 0.3878, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.8566844919786096, | |
| "grad_norm": 0.8585264682769775, | |
| "learning_rate": 1.4438502673796793e-06, | |
| "loss": 0.4813, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.8577540106951872, | |
| "grad_norm": 0.4947826862335205, | |
| "learning_rate": 1.4331550802139038e-06, | |
| "loss": 0.391, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.8588235294117647, | |
| "grad_norm": 0.5660549998283386, | |
| "learning_rate": 1.4224598930481284e-06, | |
| "loss": 0.403, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.8598930481283422, | |
| "grad_norm": 0.6671440601348877, | |
| "learning_rate": 1.4117647058823531e-06, | |
| "loss": 0.5851, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.8609625668449198, | |
| "grad_norm": 0.6218956112861633, | |
| "learning_rate": 1.4010695187165776e-06, | |
| "loss": 0.3969, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.8620320855614974, | |
| "grad_norm": 0.5715216398239136, | |
| "learning_rate": 1.3903743315508022e-06, | |
| "loss": 0.5312, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.8631016042780749, | |
| "grad_norm": 0.6772884726524353, | |
| "learning_rate": 1.379679144385027e-06, | |
| "loss": 0.4322, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.8641711229946524, | |
| "grad_norm": 0.6653510928153992, | |
| "learning_rate": 1.3689839572192514e-06, | |
| "loss": 0.4375, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.8652406417112299, | |
| "grad_norm": 1.0978738069534302, | |
| "learning_rate": 1.358288770053476e-06, | |
| "loss": 0.4823, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.8663101604278075, | |
| "grad_norm": 0.6090916991233826, | |
| "learning_rate": 1.3475935828877007e-06, | |
| "loss": 0.4204, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.8673796791443851, | |
| "grad_norm": 0.712502121925354, | |
| "learning_rate": 1.3368983957219254e-06, | |
| "loss": 0.4021, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.8684491978609625, | |
| "grad_norm": 0.5122483372688293, | |
| "learning_rate": 1.3262032085561499e-06, | |
| "loss": 0.4185, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.8695187165775401, | |
| "grad_norm": 0.5444067716598511, | |
| "learning_rate": 1.3155080213903745e-06, | |
| "loss": 0.5084, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.8705882352941177, | |
| "grad_norm": 0.6717526912689209, | |
| "learning_rate": 1.3048128342245992e-06, | |
| "loss": 0.4225, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.8716577540106952, | |
| "grad_norm": 0.5988174676895142, | |
| "learning_rate": 1.2941176470588237e-06, | |
| "loss": 0.4503, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.8727272727272727, | |
| "grad_norm": 0.6662685871124268, | |
| "learning_rate": 1.2834224598930483e-06, | |
| "loss": 0.4885, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.8737967914438503, | |
| "grad_norm": 0.6221483945846558, | |
| "learning_rate": 1.2727272727272728e-06, | |
| "loss": 0.3943, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.8748663101604278, | |
| "grad_norm": 0.7471879720687866, | |
| "learning_rate": 1.2620320855614973e-06, | |
| "loss": 0.511, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.8759358288770054, | |
| "grad_norm": 0.5364648699760437, | |
| "learning_rate": 1.251336898395722e-06, | |
| "loss": 0.4857, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.8770053475935828, | |
| "grad_norm": 0.5553902983665466, | |
| "learning_rate": 1.2406417112299466e-06, | |
| "loss": 0.3714, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.8780748663101604, | |
| "grad_norm": 0.7711859941482544, | |
| "learning_rate": 1.2299465240641713e-06, | |
| "loss": 0.4051, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.879144385026738, | |
| "grad_norm": 0.46325987577438354, | |
| "learning_rate": 1.2192513368983957e-06, | |
| "loss": 0.3949, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.8802139037433155, | |
| "grad_norm": 0.5807831883430481, | |
| "learning_rate": 1.2085561497326204e-06, | |
| "loss": 0.4564, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.881283422459893, | |
| "grad_norm": 0.5907157063484192, | |
| "learning_rate": 1.1978609625668449e-06, | |
| "loss": 0.4109, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.8823529411764706, | |
| "grad_norm": 0.6913987994194031, | |
| "learning_rate": 1.1871657754010696e-06, | |
| "loss": 0.5413, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.8834224598930481, | |
| "grad_norm": 0.8093897104263306, | |
| "learning_rate": 1.1764705882352942e-06, | |
| "loss": 0.3716, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.8844919786096257, | |
| "grad_norm": 0.7236829996109009, | |
| "learning_rate": 1.165775401069519e-06, | |
| "loss": 0.5318, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.8855614973262033, | |
| "grad_norm": 0.8007799386978149, | |
| "learning_rate": 1.1550802139037434e-06, | |
| "loss": 0.345, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.8866310160427807, | |
| "grad_norm": 0.6973840594291687, | |
| "learning_rate": 1.144385026737968e-06, | |
| "loss": 0.562, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.8877005347593583, | |
| "grad_norm": 0.5584893822669983, | |
| "learning_rate": 1.1336898395721927e-06, | |
| "loss": 0.4715, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.8887700534759359, | |
| "grad_norm": 0.6192110776901245, | |
| "learning_rate": 1.1229946524064172e-06, | |
| "loss": 0.5356, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.8898395721925134, | |
| "grad_norm": 0.64601069688797, | |
| "learning_rate": 1.1122994652406418e-06, | |
| "loss": 0.4524, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.8909090909090909, | |
| "grad_norm": 0.7730070352554321, | |
| "learning_rate": 1.1016042780748663e-06, | |
| "loss": 0.5382, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.8919786096256684, | |
| "grad_norm": 0.5383297801017761, | |
| "learning_rate": 1.090909090909091e-06, | |
| "loss": 0.4557, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.893048128342246, | |
| "grad_norm": 0.5648770928382874, | |
| "learning_rate": 1.0802139037433156e-06, | |
| "loss": 0.4287, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.8941176470588236, | |
| "grad_norm": 0.5222188234329224, | |
| "learning_rate": 1.0695187165775401e-06, | |
| "loss": 0.3199, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.895187165775401, | |
| "grad_norm": 0.6378928422927856, | |
| "learning_rate": 1.0588235294117648e-06, | |
| "loss": 0.5067, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.8962566844919786, | |
| "grad_norm": 0.5975542068481445, | |
| "learning_rate": 1.0481283422459895e-06, | |
| "loss": 0.4706, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.8973262032085562, | |
| "grad_norm": 0.6841580271720886, | |
| "learning_rate": 1.037433155080214e-06, | |
| "loss": 0.4373, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.8983957219251337, | |
| "grad_norm": 0.7325679063796997, | |
| "learning_rate": 1.0267379679144386e-06, | |
| "loss": 0.4503, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.8994652406417112, | |
| "grad_norm": 0.5811144113540649, | |
| "learning_rate": 1.0160427807486633e-06, | |
| "loss": 0.4891, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.9005347593582887, | |
| "grad_norm": 0.5845391154289246, | |
| "learning_rate": 1.005347593582888e-06, | |
| "loss": 0.4503, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.9016042780748663, | |
| "grad_norm": 0.6408500075340271, | |
| "learning_rate": 9.946524064171124e-07, | |
| "loss": 0.4333, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.9026737967914439, | |
| "grad_norm": 0.6466580033302307, | |
| "learning_rate": 9.839572192513369e-07, | |
| "loss": 0.446, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.9037433155080213, | |
| "grad_norm": 0.6254947185516357, | |
| "learning_rate": 9.732620320855615e-07, | |
| "loss": 0.4972, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.9048128342245989, | |
| "grad_norm": 0.5906391739845276, | |
| "learning_rate": 9.625668449197862e-07, | |
| "loss": 0.4724, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.9058823529411765, | |
| "grad_norm": 0.5717436075210571, | |
| "learning_rate": 9.518716577540108e-07, | |
| "loss": 0.3332, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.906951871657754, | |
| "grad_norm": 0.6699836254119873, | |
| "learning_rate": 9.411764705882353e-07, | |
| "loss": 0.4766, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.9080213903743316, | |
| "grad_norm": 0.6849234104156494, | |
| "learning_rate": 9.3048128342246e-07, | |
| "loss": 0.4887, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.9090909090909091, | |
| "grad_norm": 0.5922632813453674, | |
| "learning_rate": 9.197860962566846e-07, | |
| "loss": 0.3309, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.9101604278074866, | |
| "grad_norm": 0.5237072706222534, | |
| "learning_rate": 9.090909090909091e-07, | |
| "loss": 0.5326, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.9112299465240642, | |
| "grad_norm": 0.8511109948158264, | |
| "learning_rate": 8.983957219251338e-07, | |
| "loss": 0.4797, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.9122994652406418, | |
| "grad_norm": 0.562660276889801, | |
| "learning_rate": 8.877005347593584e-07, | |
| "loss": 0.3809, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.9133689839572192, | |
| "grad_norm": 0.5767821073532104, | |
| "learning_rate": 8.770053475935831e-07, | |
| "loss": 0.3994, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.9144385026737968, | |
| "grad_norm": 0.6438657641410828, | |
| "learning_rate": 8.663101604278075e-07, | |
| "loss": 0.5453, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.9155080213903743, | |
| "grad_norm": 0.565201997756958, | |
| "learning_rate": 8.556149732620321e-07, | |
| "loss": 0.472, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.9165775401069519, | |
| "grad_norm": 0.6507158875465393, | |
| "learning_rate": 8.449197860962568e-07, | |
| "loss": 0.5321, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.9176470588235294, | |
| "grad_norm": 0.5403389930725098, | |
| "learning_rate": 8.342245989304813e-07, | |
| "loss": 0.4845, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.9187165775401069, | |
| "grad_norm": 0.5953645706176758, | |
| "learning_rate": 8.235294117647059e-07, | |
| "loss": 0.411, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.9197860962566845, | |
| "grad_norm": 0.695575475692749, | |
| "learning_rate": 8.128342245989306e-07, | |
| "loss": 0.5195, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.9208556149732621, | |
| "grad_norm": 0.6730219721794128, | |
| "learning_rate": 8.021390374331551e-07, | |
| "loss": 0.4137, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.9219251336898395, | |
| "grad_norm": 1.4243048429489136, | |
| "learning_rate": 7.914438502673798e-07, | |
| "loss": 0.4294, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.9229946524064171, | |
| "grad_norm": 0.5623584389686584, | |
| "learning_rate": 7.807486631016044e-07, | |
| "loss": 0.4773, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.9240641711229947, | |
| "grad_norm": 0.6774773001670837, | |
| "learning_rate": 7.700534759358289e-07, | |
| "loss": 0.5562, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.9251336898395722, | |
| "grad_norm": 0.5539456605911255, | |
| "learning_rate": 7.593582887700536e-07, | |
| "loss": 0.4651, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.9262032085561497, | |
| "grad_norm": 0.6513444185256958, | |
| "learning_rate": 7.486631016042781e-07, | |
| "loss": 0.4563, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.9272727272727272, | |
| "grad_norm": 1.167435646057129, | |
| "learning_rate": 7.379679144385026e-07, | |
| "loss": 0.5558, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.9283422459893048, | |
| "grad_norm": 0.5281847715377808, | |
| "learning_rate": 7.272727272727273e-07, | |
| "loss": 0.3202, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.9294117647058824, | |
| "grad_norm": 0.6667070984840393, | |
| "learning_rate": 7.165775401069519e-07, | |
| "loss": 0.4167, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.93048128342246, | |
| "grad_norm": 0.5270236730575562, | |
| "learning_rate": 7.058823529411766e-07, | |
| "loss": 0.4095, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.9315508021390374, | |
| "grad_norm": 0.6352938413619995, | |
| "learning_rate": 6.951871657754011e-07, | |
| "loss": 0.4776, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.932620320855615, | |
| "grad_norm": 0.7554007768630981, | |
| "learning_rate": 6.844919786096257e-07, | |
| "loss": 0.5654, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.9336898395721925, | |
| "grad_norm": 0.5679738521575928, | |
| "learning_rate": 6.737967914438504e-07, | |
| "loss": 0.3741, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.9347593582887701, | |
| "grad_norm": 0.5479611158370972, | |
| "learning_rate": 6.631016042780749e-07, | |
| "loss": 0.4426, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.9358288770053476, | |
| "grad_norm": 0.590634286403656, | |
| "learning_rate": 6.524064171122996e-07, | |
| "loss": 0.4094, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.9368983957219251, | |
| "grad_norm": 0.6108012795448303, | |
| "learning_rate": 6.417112299465242e-07, | |
| "loss": 0.5372, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.9379679144385027, | |
| "grad_norm": 0.6886542439460754, | |
| "learning_rate": 6.310160427807486e-07, | |
| "loss": 0.442, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.9390374331550803, | |
| "grad_norm": 1.1169320344924927, | |
| "learning_rate": 6.203208556149733e-07, | |
| "loss": 0.414, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.9401069518716577, | |
| "grad_norm": 0.5597485303878784, | |
| "learning_rate": 6.096256684491979e-07, | |
| "loss": 0.5038, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.9411764705882353, | |
| "grad_norm": 0.9075942635536194, | |
| "learning_rate": 5.989304812834224e-07, | |
| "loss": 0.4842, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.9422459893048128, | |
| "grad_norm": 0.7498214840888977, | |
| "learning_rate": 5.882352941176471e-07, | |
| "loss": 0.4235, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.9433155080213904, | |
| "grad_norm": 0.44716253876686096, | |
| "learning_rate": 5.775401069518717e-07, | |
| "loss": 0.3689, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.9443850267379679, | |
| "grad_norm": 0.70600825548172, | |
| "learning_rate": 5.668449197860964e-07, | |
| "loss": 0.3439, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.9454545454545454, | |
| "grad_norm": 0.8799847960472107, | |
| "learning_rate": 5.561497326203209e-07, | |
| "loss": 0.5485, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.946524064171123, | |
| "grad_norm": 0.6827099323272705, | |
| "learning_rate": 5.454545454545455e-07, | |
| "loss": 0.4108, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.9475935828877006, | |
| "grad_norm": 0.6796228885650635, | |
| "learning_rate": 5.347593582887701e-07, | |
| "loss": 0.4626, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.948663101604278, | |
| "grad_norm": 0.5902719497680664, | |
| "learning_rate": 5.240641711229947e-07, | |
| "loss": 0.4051, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.9497326203208556, | |
| "grad_norm": 0.5952056050300598, | |
| "learning_rate": 5.133689839572193e-07, | |
| "loss": 0.4535, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.9508021390374332, | |
| "grad_norm": 0.5825742483139038, | |
| "learning_rate": 5.02673796791444e-07, | |
| "loss": 0.307, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.9518716577540107, | |
| "grad_norm": 0.8726152777671814, | |
| "learning_rate": 4.919786096256684e-07, | |
| "loss": 0.4499, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.9529411764705882, | |
| "grad_norm": 0.5037000775337219, | |
| "learning_rate": 4.812834224598931e-07, | |
| "loss": 0.4431, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.9540106951871657, | |
| "grad_norm": 0.6590163111686707, | |
| "learning_rate": 4.7058823529411767e-07, | |
| "loss": 0.418, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.9550802139037433, | |
| "grad_norm": 0.6153367757797241, | |
| "learning_rate": 4.598930481283423e-07, | |
| "loss": 0.4405, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.9561497326203209, | |
| "grad_norm": 0.6205403208732605, | |
| "learning_rate": 4.491978609625669e-07, | |
| "loss": 0.4167, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.9572192513368984, | |
| "grad_norm": 0.4323004186153412, | |
| "learning_rate": 4.3850267379679153e-07, | |
| "loss": 0.3526, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.9582887700534759, | |
| "grad_norm": 0.7635154724121094, | |
| "learning_rate": 4.2780748663101604e-07, | |
| "loss": 0.589, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.9593582887700535, | |
| "grad_norm": 0.7440900206565857, | |
| "learning_rate": 4.1711229946524066e-07, | |
| "loss": 0.4895, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.960427807486631, | |
| "grad_norm": 0.5068660378456116, | |
| "learning_rate": 4.064171122994653e-07, | |
| "loss": 0.4814, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.9614973262032086, | |
| "grad_norm": 0.6872171759605408, | |
| "learning_rate": 3.957219251336899e-07, | |
| "loss": 0.4035, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.9625668449197861, | |
| "grad_norm": 0.6959207057952881, | |
| "learning_rate": 3.8502673796791447e-07, | |
| "loss": 0.5726, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.9636363636363636, | |
| "grad_norm": 0.6345845460891724, | |
| "learning_rate": 3.7433155080213904e-07, | |
| "loss": 0.3893, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.9647058823529412, | |
| "grad_norm": 0.62007075548172, | |
| "learning_rate": 3.6363636363636366e-07, | |
| "loss": 0.3909, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.9657754010695188, | |
| "grad_norm": 0.6003493666648865, | |
| "learning_rate": 3.529411764705883e-07, | |
| "loss": 0.5625, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.9668449197860962, | |
| "grad_norm": 0.4929320216178894, | |
| "learning_rate": 3.4224598930481285e-07, | |
| "loss": 0.3987, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.9679144385026738, | |
| "grad_norm": 0.49141794443130493, | |
| "learning_rate": 3.3155080213903747e-07, | |
| "loss": 0.4308, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.9689839572192513, | |
| "grad_norm": 0.6370891332626343, | |
| "learning_rate": 3.208556149732621e-07, | |
| "loss": 0.4785, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.9700534759358289, | |
| "grad_norm": 0.6199567914009094, | |
| "learning_rate": 3.1016042780748665e-07, | |
| "loss": 0.4546, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.9711229946524064, | |
| "grad_norm": 0.5752115249633789, | |
| "learning_rate": 2.994652406417112e-07, | |
| "loss": 0.3874, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.9721925133689839, | |
| "grad_norm": 0.6379489302635193, | |
| "learning_rate": 2.8877005347593584e-07, | |
| "loss": 0.4679, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.9732620320855615, | |
| "grad_norm": 0.5117985606193542, | |
| "learning_rate": 2.7807486631016046e-07, | |
| "loss": 0.4179, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.9743315508021391, | |
| "grad_norm": 0.44229549169540405, | |
| "learning_rate": 2.6737967914438503e-07, | |
| "loss": 0.3306, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.9754010695187165, | |
| "grad_norm": 0.803124725818634, | |
| "learning_rate": 2.5668449197860965e-07, | |
| "loss": 0.6521, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.9764705882352941, | |
| "grad_norm": 0.5544781684875488, | |
| "learning_rate": 2.459893048128342e-07, | |
| "loss": 0.4241, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.9775401069518717, | |
| "grad_norm": 0.6121621131896973, | |
| "learning_rate": 2.3529411764705883e-07, | |
| "loss": 0.4646, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.9786096256684492, | |
| "grad_norm": 0.5191540122032166, | |
| "learning_rate": 2.2459893048128345e-07, | |
| "loss": 0.4364, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.9796791443850268, | |
| "grad_norm": 0.6737817525863647, | |
| "learning_rate": 2.1390374331550802e-07, | |
| "loss": 0.4349, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.9807486631016042, | |
| "grad_norm": 0.5362916588783264, | |
| "learning_rate": 2.0320855614973264e-07, | |
| "loss": 0.3326, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.9818181818181818, | |
| "grad_norm": 0.5771061778068542, | |
| "learning_rate": 1.9251336898395724e-07, | |
| "loss": 0.434, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.9828877005347594, | |
| "grad_norm": 0.5397818684577942, | |
| "learning_rate": 1.8181818181818183e-07, | |
| "loss": 0.3734, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.983957219251337, | |
| "grad_norm": 0.5395793914794922, | |
| "learning_rate": 1.7112299465240642e-07, | |
| "loss": 0.575, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.9850267379679144, | |
| "grad_norm": 0.65594881772995, | |
| "learning_rate": 1.6042780748663104e-07, | |
| "loss": 0.4637, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.986096256684492, | |
| "grad_norm": 0.6042408347129822, | |
| "learning_rate": 1.497326203208556e-07, | |
| "loss": 0.4913, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.9871657754010695, | |
| "grad_norm": 0.612800121307373, | |
| "learning_rate": 1.3903743315508023e-07, | |
| "loss": 0.4801, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.9882352941176471, | |
| "grad_norm": 0.6215301752090454, | |
| "learning_rate": 1.2834224598930482e-07, | |
| "loss": 0.4437, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.9893048128342246, | |
| "grad_norm": 0.5753151178359985, | |
| "learning_rate": 1.1764705882352942e-07, | |
| "loss": 0.4355, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.9903743315508021, | |
| "grad_norm": 0.6662648320198059, | |
| "learning_rate": 1.0695187165775401e-07, | |
| "loss": 0.5003, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.9914438502673797, | |
| "grad_norm": 0.6223177313804626, | |
| "learning_rate": 9.625668449197862e-08, | |
| "loss": 0.3461, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.9925133689839573, | |
| "grad_norm": 0.6701094508171082, | |
| "learning_rate": 8.556149732620321e-08, | |
| "loss": 0.5012, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.9935828877005347, | |
| "grad_norm": 0.7524900436401367, | |
| "learning_rate": 7.48663101604278e-08, | |
| "loss": 0.4354, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.9946524064171123, | |
| "grad_norm": 0.5650103092193604, | |
| "learning_rate": 6.417112299465241e-08, | |
| "loss": 0.3789, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.9957219251336898, | |
| "grad_norm": 0.6760904788970947, | |
| "learning_rate": 5.3475935828877005e-08, | |
| "loss": 0.4691, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.9967914438502674, | |
| "grad_norm": 0.4575050175189972, | |
| "learning_rate": 4.2780748663101606e-08, | |
| "loss": 0.3564, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.9978609625668449, | |
| "grad_norm": 0.5460222959518433, | |
| "learning_rate": 3.2085561497326206e-08, | |
| "loss": 0.3952, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.9989304812834224, | |
| "grad_norm": 0.47015580534935, | |
| "learning_rate": 2.1390374331550803e-08, | |
| "loss": 0.4036, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.6854033470153809, | |
| "learning_rate": 1.0695187165775401e-08, | |
| "loss": 0.5034, | |
| "step": 935 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 935, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 0, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.879608944581673e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
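
As a minimal sketch, assuming the state above is saved to a file named `trainer_state.json` (an illustrative filename, not taken from the log itself), the `log_history` array can be loaded and its per-step training loss summarized with the standard library only:

```python
# Minimal sketch: load a trainer_state.json-style log like the one above and
# summarize its loss curve. The filename and the averaging window are assumptions.
import json
import statistics

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step entries that carry a training loss value.
train_logs = [e for e in state["log_history"] if "loss" in e]
losses = [e["loss"] for e in train_logs]

print(f"logged steps      : {len(train_logs)}")
print(f"first / last loss : {losses[0]:.4f} / {losses[-1]:.4f}")
print(f"mean loss         : {statistics.mean(losses):.4f}")

# Trailing average to smooth per-step noise; the window size is arbitrary.
window = 50
print(f"mean of last {window} steps: {statistics.mean(losses[-window:]):.4f}")
```

The same pattern applies to the `learning_rate` and `grad_norm` fields carried by each entry, should those need to be inspected instead.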