diff --git "a/checkpoint-2605/trainer_state.json" "b/checkpoint-2605/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-2605/trainer_state.json" @@ -0,0 +1,18269 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 5.0, + "eval_steps": 500, + "global_step": 2605, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0019203072491598655, + "grad_norm": 3.050241640622783, + "learning_rate": 0.0, + "loss": 1.5054, + "step": 1 + }, + { + "epoch": 0.003840614498319731, + "grad_norm": 2.974162230197891, + "learning_rate": 3.831417624521073e-08, + "loss": 1.7399, + "step": 2 + }, + { + "epoch": 0.005760921747479597, + "grad_norm": 3.006494012208738, + "learning_rate": 7.662835249042146e-08, + "loss": 1.8793, + "step": 3 + }, + { + "epoch": 0.007681228996639462, + "grad_norm": 2.910641370990376, + "learning_rate": 1.1494252873563219e-07, + "loss": 1.7187, + "step": 4 + }, + { + "epoch": 0.009601536245799328, + "grad_norm": 2.6996013829444134, + "learning_rate": 1.5325670498084292e-07, + "loss": 1.6475, + "step": 5 + }, + { + "epoch": 0.011521843494959194, + "grad_norm": 3.0173297525530525, + "learning_rate": 1.9157088122605365e-07, + "loss": 1.6964, + "step": 6 + }, + { + "epoch": 0.01344215074411906, + "grad_norm": 3.0082364904112566, + "learning_rate": 2.2988505747126437e-07, + "loss": 1.6851, + "step": 7 + }, + { + "epoch": 0.015362457993278924, + "grad_norm": 2.880521601062656, + "learning_rate": 2.681992337164751e-07, + "loss": 1.557, + "step": 8 + }, + { + "epoch": 0.01728276524243879, + "grad_norm": 2.8379079611890896, + "learning_rate": 3.0651340996168583e-07, + "loss": 1.6569, + "step": 9 + }, + { + "epoch": 0.019203072491598656, + "grad_norm": 2.902207545955143, + "learning_rate": 3.4482758620689656e-07, + "loss": 1.637, + "step": 10 + }, + { + "epoch": 0.02112337974075852, + "grad_norm": 2.8146383036248883, + "learning_rate": 3.831417624521073e-07, + "loss": 1.7074, + "step": 11 + }, + { + "epoch": 0.023043686989918388, + "grad_norm": 2.7702439023075422, + "learning_rate": 4.2145593869731807e-07, + "loss": 1.6621, + "step": 12 + }, + { + "epoch": 0.024963994239078253, + "grad_norm": 2.948897328700205, + "learning_rate": 4.5977011494252875e-07, + "loss": 1.7141, + "step": 13 + }, + { + "epoch": 0.02688430148823812, + "grad_norm": 2.6155066202719666, + "learning_rate": 4.980842911877395e-07, + "loss": 1.6133, + "step": 14 + }, + { + "epoch": 0.028804608737397985, + "grad_norm": 2.7754744384918335, + "learning_rate": 5.363984674329502e-07, + "loss": 1.6922, + "step": 15 + }, + { + "epoch": 0.030724915986557848, + "grad_norm": 3.005540548334177, + "learning_rate": 5.747126436781609e-07, + "loss": 1.5753, + "step": 16 + }, + { + "epoch": 0.03264522323571772, + "grad_norm": 2.6675051377737486, + "learning_rate": 6.130268199233717e-07, + "loss": 1.6697, + "step": 17 + }, + { + "epoch": 0.03456553048487758, + "grad_norm": 2.7911452731594326, + "learning_rate": 6.513409961685824e-07, + "loss": 1.6166, + "step": 18 + }, + { + "epoch": 0.03648583773403745, + "grad_norm": 2.754640389635153, + "learning_rate": 6.896551724137931e-07, + "loss": 1.4773, + "step": 19 + }, + { + "epoch": 0.03840614498319731, + "grad_norm": 2.58056430792743, + "learning_rate": 7.27969348659004e-07, + "loss": 1.5533, + "step": 20 + }, + { + "epoch": 0.040326452232357174, + "grad_norm": 2.5780862888672047, + "learning_rate": 7.662835249042146e-07, + "loss": 1.5408, + "step": 
21 + }, + { + "epoch": 0.04224675948151704, + "grad_norm": 2.2939155489081293, + "learning_rate": 8.045977011494253e-07, + "loss": 1.4477, + "step": 22 + }, + { + "epoch": 0.044167066730676906, + "grad_norm": 2.326783109749172, + "learning_rate": 8.429118773946361e-07, + "loss": 1.6446, + "step": 23 + }, + { + "epoch": 0.046087373979836775, + "grad_norm": 1.9784500676559427, + "learning_rate": 8.812260536398468e-07, + "loss": 1.4741, + "step": 24 + }, + { + "epoch": 0.04800768122899664, + "grad_norm": 2.158116045848602, + "learning_rate": 9.195402298850575e-07, + "loss": 1.8359, + "step": 25 + }, + { + "epoch": 0.04992798847815651, + "grad_norm": 2.035070253571019, + "learning_rate": 9.578544061302683e-07, + "loss": 1.6079, + "step": 26 + }, + { + "epoch": 0.05184829572731637, + "grad_norm": 1.9828772550714657, + "learning_rate": 9.96168582375479e-07, + "loss": 1.6275, + "step": 27 + }, + { + "epoch": 0.05376860297647624, + "grad_norm": 2.063763559262756, + "learning_rate": 1.0344827586206898e-06, + "loss": 1.6298, + "step": 28 + }, + { + "epoch": 0.0556889102256361, + "grad_norm": 1.9339129722646953, + "learning_rate": 1.0727969348659004e-06, + "loss": 1.5816, + "step": 29 + }, + { + "epoch": 0.05760921747479597, + "grad_norm": 1.916925980149754, + "learning_rate": 1.111111111111111e-06, + "loss": 1.6064, + "step": 30 + }, + { + "epoch": 0.05952952472395583, + "grad_norm": 1.6450444187274702, + "learning_rate": 1.1494252873563219e-06, + "loss": 1.5159, + "step": 31 + }, + { + "epoch": 0.061449831973115696, + "grad_norm": 1.6132237873302806, + "learning_rate": 1.1877394636015327e-06, + "loss": 1.5367, + "step": 32 + }, + { + "epoch": 0.06337013922227556, + "grad_norm": 1.818936433606168, + "learning_rate": 1.2260536398467433e-06, + "loss": 1.4623, + "step": 33 + }, + { + "epoch": 0.06529044647143543, + "grad_norm": 1.7241994573893415, + "learning_rate": 1.2643678160919542e-06, + "loss": 1.6151, + "step": 34 + }, + { + "epoch": 0.06721075372059529, + "grad_norm": 1.5649464939481088, + "learning_rate": 1.3026819923371648e-06, + "loss": 1.6588, + "step": 35 + }, + { + "epoch": 0.06913106096975516, + "grad_norm": 1.5636025180091495, + "learning_rate": 1.3409961685823756e-06, + "loss": 1.3946, + "step": 36 + }, + { + "epoch": 0.07105136821891503, + "grad_norm": 1.5461645880192942, + "learning_rate": 1.3793103448275862e-06, + "loss": 1.6768, + "step": 37 + }, + { + "epoch": 0.0729716754680749, + "grad_norm": 1.4344137662856442, + "learning_rate": 1.417624521072797e-06, + "loss": 1.4397, + "step": 38 + }, + { + "epoch": 0.07489198271723475, + "grad_norm": 1.3960130764357097, + "learning_rate": 1.455938697318008e-06, + "loss": 1.6793, + "step": 39 + }, + { + "epoch": 0.07681228996639462, + "grad_norm": 1.2396850770578887, + "learning_rate": 1.4942528735632185e-06, + "loss": 1.5595, + "step": 40 + }, + { + "epoch": 0.07873259721555449, + "grad_norm": 1.2226941074613882, + "learning_rate": 1.5325670498084292e-06, + "loss": 1.4513, + "step": 41 + }, + { + "epoch": 0.08065290446471435, + "grad_norm": 1.2420772331499856, + "learning_rate": 1.57088122605364e-06, + "loss": 1.5815, + "step": 42 + }, + { + "epoch": 0.08257321171387422, + "grad_norm": 1.1487654644464287, + "learning_rate": 1.6091954022988506e-06, + "loss": 1.5939, + "step": 43 + }, + { + "epoch": 0.08449351896303409, + "grad_norm": 1.2124391317795524, + "learning_rate": 1.6475095785440615e-06, + "loss": 1.3123, + "step": 44 + }, + { + "epoch": 0.08641382621219396, + "grad_norm": 1.2527034268005153, + "learning_rate": 1.6858237547892723e-06, 
+ "loss": 1.6962, + "step": 45 + }, + { + "epoch": 0.08833413346135381, + "grad_norm": 1.22540150664902, + "learning_rate": 1.724137931034483e-06, + "loss": 1.5573, + "step": 46 + }, + { + "epoch": 0.09025444071051368, + "grad_norm": 1.0822330995590186, + "learning_rate": 1.7624521072796935e-06, + "loss": 1.6174, + "step": 47 + }, + { + "epoch": 0.09217474795967355, + "grad_norm": 1.3725369113494932, + "learning_rate": 1.8007662835249044e-06, + "loss": 1.5724, + "step": 48 + }, + { + "epoch": 0.09409505520883342, + "grad_norm": 1.1246522439222844, + "learning_rate": 1.839080459770115e-06, + "loss": 1.6577, + "step": 49 + }, + { + "epoch": 0.09601536245799328, + "grad_norm": 1.153435955007641, + "learning_rate": 1.8773946360153258e-06, + "loss": 1.6045, + "step": 50 + }, + { + "epoch": 0.09793566970715314, + "grad_norm": 1.1142741754051628, + "learning_rate": 1.9157088122605367e-06, + "loss": 1.6562, + "step": 51 + }, + { + "epoch": 0.09985597695631301, + "grad_norm": 1.0608003189926583, + "learning_rate": 1.9540229885057475e-06, + "loss": 1.4951, + "step": 52 + }, + { + "epoch": 0.10177628420547287, + "grad_norm": 1.0049278590265711, + "learning_rate": 1.992337164750958e-06, + "loss": 1.5432, + "step": 53 + }, + { + "epoch": 0.10369659145463274, + "grad_norm": 0.895715467779371, + "learning_rate": 2.0306513409961687e-06, + "loss": 1.4494, + "step": 54 + }, + { + "epoch": 0.10561689870379261, + "grad_norm": 0.948460307989256, + "learning_rate": 2.0689655172413796e-06, + "loss": 1.6377, + "step": 55 + }, + { + "epoch": 0.10753720595295248, + "grad_norm": 0.8960302855343928, + "learning_rate": 2.1072796934865904e-06, + "loss": 1.5476, + "step": 56 + }, + { + "epoch": 0.10945751320211233, + "grad_norm": 0.8555220673502346, + "learning_rate": 2.145593869731801e-06, + "loss": 1.2937, + "step": 57 + }, + { + "epoch": 0.1113778204512722, + "grad_norm": 0.9297918052847216, + "learning_rate": 2.1839080459770117e-06, + "loss": 1.5018, + "step": 58 + }, + { + "epoch": 0.11329812770043207, + "grad_norm": 0.8615767845690797, + "learning_rate": 2.222222222222222e-06, + "loss": 1.4411, + "step": 59 + }, + { + "epoch": 0.11521843494959194, + "grad_norm": 0.9800340953282465, + "learning_rate": 2.260536398467433e-06, + "loss": 1.6592, + "step": 60 + }, + { + "epoch": 0.1171387421987518, + "grad_norm": 0.8319044483038768, + "learning_rate": 2.2988505747126437e-06, + "loss": 1.4999, + "step": 61 + }, + { + "epoch": 0.11905904944791167, + "grad_norm": 0.8113904065691636, + "learning_rate": 2.3371647509578546e-06, + "loss": 1.4058, + "step": 62 + }, + { + "epoch": 0.12097935669707154, + "grad_norm": 0.7584474578599453, + "learning_rate": 2.3754789272030654e-06, + "loss": 1.1995, + "step": 63 + }, + { + "epoch": 0.12289966394623139, + "grad_norm": 0.8112593744821786, + "learning_rate": 2.4137931034482762e-06, + "loss": 1.622, + "step": 64 + }, + { + "epoch": 0.12481997119539126, + "grad_norm": 0.8465973433211744, + "learning_rate": 2.4521072796934867e-06, + "loss": 1.5474, + "step": 65 + }, + { + "epoch": 0.12674027844455113, + "grad_norm": 0.829645722256521, + "learning_rate": 2.4904214559386975e-06, + "loss": 1.485, + "step": 66 + }, + { + "epoch": 0.128660585693711, + "grad_norm": 0.8480123859041653, + "learning_rate": 2.5287356321839083e-06, + "loss": 1.5641, + "step": 67 + }, + { + "epoch": 0.13058089294287087, + "grad_norm": 0.7945937151249043, + "learning_rate": 2.567049808429119e-06, + "loss": 1.3511, + "step": 68 + }, + { + "epoch": 0.13250120019203074, + "grad_norm": 0.8452583555772669, + 
"learning_rate": 2.6053639846743296e-06, + "loss": 1.4936, + "step": 69 + }, + { + "epoch": 0.13442150744119058, + "grad_norm": 0.7913167472525273, + "learning_rate": 2.6436781609195404e-06, + "loss": 1.524, + "step": 70 + }, + { + "epoch": 0.13634181469035045, + "grad_norm": 0.7632883226077274, + "learning_rate": 2.6819923371647512e-06, + "loss": 1.4957, + "step": 71 + }, + { + "epoch": 0.13826212193951032, + "grad_norm": 0.8040467176753551, + "learning_rate": 2.720306513409962e-06, + "loss": 1.5472, + "step": 72 + }, + { + "epoch": 0.1401824291886702, + "grad_norm": 0.8063495654842968, + "learning_rate": 2.7586206896551725e-06, + "loss": 1.4355, + "step": 73 + }, + { + "epoch": 0.14210273643783006, + "grad_norm": 0.8632589003342163, + "learning_rate": 2.796934865900383e-06, + "loss": 1.6251, + "step": 74 + }, + { + "epoch": 0.14402304368698993, + "grad_norm": 0.9065110727033228, + "learning_rate": 2.835249042145594e-06, + "loss": 1.3482, + "step": 75 + }, + { + "epoch": 0.1459433509361498, + "grad_norm": 0.7358621824934779, + "learning_rate": 2.8735632183908046e-06, + "loss": 1.4579, + "step": 76 + }, + { + "epoch": 0.14786365818530964, + "grad_norm": 0.7882740690499183, + "learning_rate": 2.911877394636016e-06, + "loss": 1.584, + "step": 77 + }, + { + "epoch": 0.1497839654344695, + "grad_norm": 0.8028390602285591, + "learning_rate": 2.9501915708812262e-06, + "loss": 1.5799, + "step": 78 + }, + { + "epoch": 0.15170427268362938, + "grad_norm": 0.7087212262202383, + "learning_rate": 2.988505747126437e-06, + "loss": 1.3796, + "step": 79 + }, + { + "epoch": 0.15362457993278925, + "grad_norm": 0.7477246745009772, + "learning_rate": 3.026819923371648e-06, + "loss": 1.4577, + "step": 80 + }, + { + "epoch": 0.15554488718194912, + "grad_norm": 0.7362367028641865, + "learning_rate": 3.0651340996168583e-06, + "loss": 1.3986, + "step": 81 + }, + { + "epoch": 0.15746519443110898, + "grad_norm": 0.7542749931181064, + "learning_rate": 3.103448275862069e-06, + "loss": 1.3923, + "step": 82 + }, + { + "epoch": 0.15938550168026885, + "grad_norm": 0.7591236429919996, + "learning_rate": 3.14176245210728e-06, + "loss": 1.468, + "step": 83 + }, + { + "epoch": 0.1613058089294287, + "grad_norm": 0.750073455610315, + "learning_rate": 3.180076628352491e-06, + "loss": 1.5728, + "step": 84 + }, + { + "epoch": 0.16322611617858857, + "grad_norm": 0.7463483577071213, + "learning_rate": 3.2183908045977012e-06, + "loss": 1.5751, + "step": 85 + }, + { + "epoch": 0.16514642342774843, + "grad_norm": 0.7459336348523459, + "learning_rate": 3.256704980842912e-06, + "loss": 1.5821, + "step": 86 + }, + { + "epoch": 0.1670667306769083, + "grad_norm": 0.7503745075545026, + "learning_rate": 3.295019157088123e-06, + "loss": 1.3811, + "step": 87 + }, + { + "epoch": 0.16898703792606817, + "grad_norm": 0.6781019129991772, + "learning_rate": 3.3333333333333333e-06, + "loss": 1.4967, + "step": 88 + }, + { + "epoch": 0.17090734517522804, + "grad_norm": 0.6772160890677477, + "learning_rate": 3.3716475095785446e-06, + "loss": 1.4602, + "step": 89 + }, + { + "epoch": 0.1728276524243879, + "grad_norm": 0.7414559131468873, + "learning_rate": 3.409961685823755e-06, + "loss": 1.5384, + "step": 90 + }, + { + "epoch": 0.17474795967354778, + "grad_norm": 0.7339713894607118, + "learning_rate": 3.448275862068966e-06, + "loss": 1.3478, + "step": 91 + }, + { + "epoch": 0.17666826692270762, + "grad_norm": 0.6741291120903733, + "learning_rate": 3.4865900383141767e-06, + "loss": 1.3051, + "step": 92 + }, + { + "epoch": 0.1785885741718675, + "grad_norm": 
0.8466746593898973, + "learning_rate": 3.524904214559387e-06, + "loss": 1.6414, + "step": 93 + }, + { + "epoch": 0.18050888142102736, + "grad_norm": 0.6235998792480368, + "learning_rate": 3.563218390804598e-06, + "loss": 1.3774, + "step": 94 + }, + { + "epoch": 0.18242918867018723, + "grad_norm": 0.7888875616341429, + "learning_rate": 3.6015325670498087e-06, + "loss": 1.2273, + "step": 95 + }, + { + "epoch": 0.1843494959193471, + "grad_norm": 0.7184060206606023, + "learning_rate": 3.6398467432950196e-06, + "loss": 1.4414, + "step": 96 + }, + { + "epoch": 0.18626980316850697, + "grad_norm": 0.6931282624303644, + "learning_rate": 3.67816091954023e-06, + "loss": 1.1996, + "step": 97 + }, + { + "epoch": 0.18819011041766684, + "grad_norm": 0.6898899817332669, + "learning_rate": 3.7164750957854412e-06, + "loss": 1.153, + "step": 98 + }, + { + "epoch": 0.19011041766682668, + "grad_norm": 0.6706065855351341, + "learning_rate": 3.7547892720306517e-06, + "loss": 1.3219, + "step": 99 + }, + { + "epoch": 0.19203072491598655, + "grad_norm": 0.6618017863324709, + "learning_rate": 3.793103448275862e-06, + "loss": 1.3694, + "step": 100 + }, + { + "epoch": 0.19395103216514642, + "grad_norm": 0.7084267342732283, + "learning_rate": 3.831417624521073e-06, + "loss": 1.5242, + "step": 101 + }, + { + "epoch": 0.1958713394143063, + "grad_norm": 0.726545416016091, + "learning_rate": 3.869731800766283e-06, + "loss": 1.4521, + "step": 102 + }, + { + "epoch": 0.19779164666346616, + "grad_norm": 0.7462891561612502, + "learning_rate": 3.908045977011495e-06, + "loss": 1.4687, + "step": 103 + }, + { + "epoch": 0.19971195391262603, + "grad_norm": 0.7186363274122183, + "learning_rate": 3.946360153256705e-06, + "loss": 1.4222, + "step": 104 + }, + { + "epoch": 0.2016322611617859, + "grad_norm": 0.6562522942973675, + "learning_rate": 3.984674329501916e-06, + "loss": 1.3302, + "step": 105 + }, + { + "epoch": 0.20355256841094574, + "grad_norm": 0.7593764693035522, + "learning_rate": 4.022988505747127e-06, + "loss": 1.4029, + "step": 106 + }, + { + "epoch": 0.2054728756601056, + "grad_norm": 0.674187794860422, + "learning_rate": 4.0613026819923375e-06, + "loss": 1.2851, + "step": 107 + }, + { + "epoch": 0.20739318290926548, + "grad_norm": 0.6771453444719904, + "learning_rate": 4.099616858237548e-06, + "loss": 1.3873, + "step": 108 + }, + { + "epoch": 0.20931349015842535, + "grad_norm": 0.6904184501885661, + "learning_rate": 4.137931034482759e-06, + "loss": 1.4671, + "step": 109 + }, + { + "epoch": 0.21123379740758522, + "grad_norm": 0.7386010515763627, + "learning_rate": 4.17624521072797e-06, + "loss": 1.4114, + "step": 110 + }, + { + "epoch": 0.21315410465674509, + "grad_norm": 0.6330108602988729, + "learning_rate": 4.214559386973181e-06, + "loss": 1.5344, + "step": 111 + }, + { + "epoch": 0.21507441190590496, + "grad_norm": 0.6476316086673372, + "learning_rate": 4.252873563218391e-06, + "loss": 1.4134, + "step": 112 + }, + { + "epoch": 0.2169947191550648, + "grad_norm": 0.7369608607793586, + "learning_rate": 4.291187739463602e-06, + "loss": 1.4656, + "step": 113 + }, + { + "epoch": 0.21891502640422467, + "grad_norm": 0.6991819737178624, + "learning_rate": 4.3295019157088125e-06, + "loss": 1.3801, + "step": 114 + }, + { + "epoch": 0.22083533365338454, + "grad_norm": 0.6670879445681203, + "learning_rate": 4.367816091954023e-06, + "loss": 1.4012, + "step": 115 + }, + { + "epoch": 0.2227556409025444, + "grad_norm": 0.7091321651558993, + "learning_rate": 4.406130268199234e-06, + "loss": 1.369, + "step": 116 + }, + { + "epoch": 
0.22467594815170427, + "grad_norm": 0.6538069130880931, + "learning_rate": 4.444444444444444e-06, + "loss": 1.207, + "step": 117 + }, + { + "epoch": 0.22659625540086414, + "grad_norm": 0.7414604205506811, + "learning_rate": 4.482758620689656e-06, + "loss": 1.4466, + "step": 118 + }, + { + "epoch": 0.228516562650024, + "grad_norm": 0.6754242592483168, + "learning_rate": 4.521072796934866e-06, + "loss": 1.345, + "step": 119 + }, + { + "epoch": 0.23043686989918388, + "grad_norm": 0.7890890418316723, + "learning_rate": 4.5593869731800775e-06, + "loss": 1.4887, + "step": 120 + }, + { + "epoch": 0.23235717714834372, + "grad_norm": 0.7142672200972927, + "learning_rate": 4.5977011494252875e-06, + "loss": 1.5663, + "step": 121 + }, + { + "epoch": 0.2342774843975036, + "grad_norm": 0.6607576396465201, + "learning_rate": 4.636015325670498e-06, + "loss": 1.5097, + "step": 122 + }, + { + "epoch": 0.23619779164666346, + "grad_norm": 0.7801245141924045, + "learning_rate": 4.674329501915709e-06, + "loss": 1.5577, + "step": 123 + }, + { + "epoch": 0.23811809889582333, + "grad_norm": 0.7853237789383211, + "learning_rate": 4.71264367816092e-06, + "loss": 1.5542, + "step": 124 + }, + { + "epoch": 0.2400384061449832, + "grad_norm": 0.6731916164633106, + "learning_rate": 4.750957854406131e-06, + "loss": 1.5991, + "step": 125 + }, + { + "epoch": 0.24195871339414307, + "grad_norm": 0.7081285505123779, + "learning_rate": 4.789272030651342e-06, + "loss": 1.4219, + "step": 126 + }, + { + "epoch": 0.24387902064330294, + "grad_norm": 0.6523124091427233, + "learning_rate": 4.8275862068965525e-06, + "loss": 1.3967, + "step": 127 + }, + { + "epoch": 0.24579932789246278, + "grad_norm": 0.7010318704056578, + "learning_rate": 4.8659003831417625e-06, + "loss": 1.4079, + "step": 128 + }, + { + "epoch": 0.24771963514162265, + "grad_norm": 0.7310188880844346, + "learning_rate": 4.904214559386973e-06, + "loss": 1.5645, + "step": 129 + }, + { + "epoch": 0.24963994239078252, + "grad_norm": 0.7370196750980766, + "learning_rate": 4.942528735632184e-06, + "loss": 1.3163, + "step": 130 + }, + { + "epoch": 0.2515602496399424, + "grad_norm": 0.5988613538056734, + "learning_rate": 4.980842911877395e-06, + "loss": 1.2164, + "step": 131 + }, + { + "epoch": 0.25348055688910226, + "grad_norm": 0.6686452758995931, + "learning_rate": 5.019157088122606e-06, + "loss": 1.3216, + "step": 132 + }, + { + "epoch": 0.25540086413826213, + "grad_norm": 0.6283032311907456, + "learning_rate": 5.057471264367817e-06, + "loss": 1.3004, + "step": 133 + }, + { + "epoch": 0.257321171387422, + "grad_norm": 0.7131274515928692, + "learning_rate": 5.095785440613027e-06, + "loss": 1.3574, + "step": 134 + }, + { + "epoch": 0.25924147863658187, + "grad_norm": 0.7562023685666645, + "learning_rate": 5.134099616858238e-06, + "loss": 1.4755, + "step": 135 + }, + { + "epoch": 0.26116178588574174, + "grad_norm": 0.7260126586378673, + "learning_rate": 5.172413793103449e-06, + "loss": 1.3512, + "step": 136 + }, + { + "epoch": 0.2630820931349016, + "grad_norm": 0.7027786093003081, + "learning_rate": 5.210727969348659e-06, + "loss": 1.3988, + "step": 137 + }, + { + "epoch": 0.2650024003840615, + "grad_norm": 0.6536542941035637, + "learning_rate": 5.24904214559387e-06, + "loss": 1.3403, + "step": 138 + }, + { + "epoch": 0.2669227076332213, + "grad_norm": 0.6887374540995002, + "learning_rate": 5.287356321839081e-06, + "loss": 1.4009, + "step": 139 + }, + { + "epoch": 0.26884301488238116, + "grad_norm": 0.6904650924816887, + "learning_rate": 5.3256704980842925e-06, + "loss": 
1.3674, + "step": 140 + }, + { + "epoch": 0.27076332213154103, + "grad_norm": 0.7396766783545521, + "learning_rate": 5.3639846743295025e-06, + "loss": 1.2224, + "step": 141 + }, + { + "epoch": 0.2726836293807009, + "grad_norm": 0.6618698222762107, + "learning_rate": 5.402298850574713e-06, + "loss": 1.3261, + "step": 142 + }, + { + "epoch": 0.27460393662986077, + "grad_norm": 0.7346869512327489, + "learning_rate": 5.440613026819924e-06, + "loss": 1.5539, + "step": 143 + }, + { + "epoch": 0.27652424387902064, + "grad_norm": 0.6542272405748857, + "learning_rate": 5.478927203065134e-06, + "loss": 1.2301, + "step": 144 + }, + { + "epoch": 0.2784445511281805, + "grad_norm": 0.652844033827654, + "learning_rate": 5.517241379310345e-06, + "loss": 1.3203, + "step": 145 + }, + { + "epoch": 0.2803648583773404, + "grad_norm": 0.7093255205591261, + "learning_rate": 5.555555555555557e-06, + "loss": 1.4194, + "step": 146 + }, + { + "epoch": 0.28228516562650025, + "grad_norm": 0.7713263587807493, + "learning_rate": 5.593869731800766e-06, + "loss": 1.3986, + "step": 147 + }, + { + "epoch": 0.2842054728756601, + "grad_norm": 0.7273045777114594, + "learning_rate": 5.6321839080459775e-06, + "loss": 1.5226, + "step": 148 + }, + { + "epoch": 0.28612578012482, + "grad_norm": 0.6416135588874926, + "learning_rate": 5.670498084291188e-06, + "loss": 1.4148, + "step": 149 + }, + { + "epoch": 0.28804608737397985, + "grad_norm": 0.6767941047038493, + "learning_rate": 5.708812260536399e-06, + "loss": 1.4741, + "step": 150 + }, + { + "epoch": 0.2899663946231397, + "grad_norm": 0.7125976753134923, + "learning_rate": 5.747126436781609e-06, + "loss": 1.4689, + "step": 151 + }, + { + "epoch": 0.2918867018722996, + "grad_norm": 0.6848434615690435, + "learning_rate": 5.78544061302682e-06, + "loss": 1.3208, + "step": 152 + }, + { + "epoch": 0.2938070091214594, + "grad_norm": 0.7637154008633908, + "learning_rate": 5.823754789272032e-06, + "loss": 1.5237, + "step": 153 + }, + { + "epoch": 0.2957273163706193, + "grad_norm": 0.6483331857341429, + "learning_rate": 5.862068965517242e-06, + "loss": 1.3073, + "step": 154 + }, + { + "epoch": 0.29764762361977914, + "grad_norm": 0.684818235289912, + "learning_rate": 5.9003831417624525e-06, + "loss": 1.4076, + "step": 155 + }, + { + "epoch": 0.299567930868939, + "grad_norm": 0.6683114123494819, + "learning_rate": 5.938697318007663e-06, + "loss": 1.3518, + "step": 156 + }, + { + "epoch": 0.3014882381180989, + "grad_norm": 0.8301668940114854, + "learning_rate": 5.977011494252874e-06, + "loss": 1.5087, + "step": 157 + }, + { + "epoch": 0.30340854536725875, + "grad_norm": 0.7306107739349151, + "learning_rate": 6.015325670498084e-06, + "loss": 1.4187, + "step": 158 + }, + { + "epoch": 0.3053288526164186, + "grad_norm": 0.692887187718929, + "learning_rate": 6.053639846743296e-06, + "loss": 1.5365, + "step": 159 + }, + { + "epoch": 0.3072491598655785, + "grad_norm": 0.6502853635559999, + "learning_rate": 6.091954022988507e-06, + "loss": 1.3497, + "step": 160 + }, + { + "epoch": 0.30916946711473836, + "grad_norm": 0.7309674020853324, + "learning_rate": 6.130268199233717e-06, + "loss": 1.4965, + "step": 161 + }, + { + "epoch": 0.31108977436389823, + "grad_norm": 0.7124865800378807, + "learning_rate": 6.1685823754789275e-06, + "loss": 1.4449, + "step": 162 + }, + { + "epoch": 0.3130100816130581, + "grad_norm": 0.692769353628125, + "learning_rate": 6.206896551724138e-06, + "loss": 1.2823, + "step": 163 + }, + { + "epoch": 0.31493038886221797, + "grad_norm": 0.8120061941639997, + "learning_rate": 
6.24521072796935e-06, + "loss": 1.5322, + "step": 164 + }, + { + "epoch": 0.31685069611137784, + "grad_norm": 0.6506906315887734, + "learning_rate": 6.28352490421456e-06, + "loss": 1.4138, + "step": 165 + }, + { + "epoch": 0.3187710033605377, + "grad_norm": 0.7021274588006857, + "learning_rate": 6.321839080459771e-06, + "loss": 1.5534, + "step": 166 + }, + { + "epoch": 0.3206913106096976, + "grad_norm": 0.6965648094070332, + "learning_rate": 6.360153256704982e-06, + "loss": 1.3055, + "step": 167 + }, + { + "epoch": 0.3226116178588574, + "grad_norm": 0.7498732720147595, + "learning_rate": 6.398467432950192e-06, + "loss": 1.2775, + "step": 168 + }, + { + "epoch": 0.32453192510801726, + "grad_norm": 0.7097564261194121, + "learning_rate": 6.4367816091954025e-06, + "loss": 1.3029, + "step": 169 + }, + { + "epoch": 0.32645223235717713, + "grad_norm": 0.6382917432482148, + "learning_rate": 6.475095785440614e-06, + "loss": 1.3569, + "step": 170 + }, + { + "epoch": 0.328372539606337, + "grad_norm": 0.6745359700702276, + "learning_rate": 6.513409961685824e-06, + "loss": 1.3339, + "step": 171 + }, + { + "epoch": 0.33029284685549687, + "grad_norm": 0.8056549461489378, + "learning_rate": 6.551724137931035e-06, + "loss": 1.4649, + "step": 172 + }, + { + "epoch": 0.33221315410465674, + "grad_norm": 0.7305964712888819, + "learning_rate": 6.590038314176246e-06, + "loss": 1.4451, + "step": 173 + }, + { + "epoch": 0.3341334613538166, + "grad_norm": 0.6201165387321146, + "learning_rate": 6.628352490421457e-06, + "loss": 1.3352, + "step": 174 + }, + { + "epoch": 0.3360537686029765, + "grad_norm": 0.8265036713279577, + "learning_rate": 6.666666666666667e-06, + "loss": 1.4794, + "step": 175 + }, + { + "epoch": 0.33797407585213635, + "grad_norm": 0.6893681404195715, + "learning_rate": 6.7049808429118775e-06, + "loss": 1.4419, + "step": 176 + }, + { + "epoch": 0.3398943831012962, + "grad_norm": 0.7509743815259918, + "learning_rate": 6.743295019157089e-06, + "loss": 1.3666, + "step": 177 + }, + { + "epoch": 0.3418146903504561, + "grad_norm": 0.7122960885573658, + "learning_rate": 6.781609195402299e-06, + "loss": 1.4108, + "step": 178 + }, + { + "epoch": 0.34373499759961595, + "grad_norm": 0.848930385570575, + "learning_rate": 6.81992337164751e-06, + "loss": 1.4827, + "step": 179 + }, + { + "epoch": 0.3456553048487758, + "grad_norm": 0.7602028733722221, + "learning_rate": 6.858237547892721e-06, + "loss": 1.3414, + "step": 180 + }, + { + "epoch": 0.3475756120979357, + "grad_norm": 0.8288114979771807, + "learning_rate": 6.896551724137932e-06, + "loss": 1.2727, + "step": 181 + }, + { + "epoch": 0.34949591934709556, + "grad_norm": 0.7693760683257455, + "learning_rate": 6.934865900383142e-06, + "loss": 1.3706, + "step": 182 + }, + { + "epoch": 0.3514162265962554, + "grad_norm": 0.7808563776887848, + "learning_rate": 6.973180076628353e-06, + "loss": 1.4723, + "step": 183 + }, + { + "epoch": 0.35333653384541525, + "grad_norm": 0.8734769267258163, + "learning_rate": 7.011494252873564e-06, + "loss": 1.2891, + "step": 184 + }, + { + "epoch": 0.3552568410945751, + "grad_norm": 0.8867229876837837, + "learning_rate": 7.049808429118774e-06, + "loss": 1.4359, + "step": 185 + }, + { + "epoch": 0.357177148343735, + "grad_norm": 0.8349083935516824, + "learning_rate": 7.088122605363985e-06, + "loss": 1.5565, + "step": 186 + }, + { + "epoch": 0.35909745559289485, + "grad_norm": 0.7400284977493244, + "learning_rate": 7.126436781609196e-06, + "loss": 1.456, + "step": 187 + }, + { + "epoch": 0.3610177628420547, + "grad_norm": 
0.6439707610744461, + "learning_rate": 7.1647509578544075e-06, + "loss": 1.2718, + "step": 188 + }, + { + "epoch": 0.3629380700912146, + "grad_norm": 0.757428349385491, + "learning_rate": 7.2030651340996175e-06, + "loss": 1.3176, + "step": 189 + }, + { + "epoch": 0.36485837734037446, + "grad_norm": 0.6979336476743889, + "learning_rate": 7.241379310344828e-06, + "loss": 1.1578, + "step": 190 + }, + { + "epoch": 0.36677868458953433, + "grad_norm": 0.7318936685975646, + "learning_rate": 7.279693486590039e-06, + "loss": 1.5306, + "step": 191 + }, + { + "epoch": 0.3686989918386942, + "grad_norm": 0.6968684758048505, + "learning_rate": 7.318007662835249e-06, + "loss": 1.3176, + "step": 192 + }, + { + "epoch": 0.37061929908785407, + "grad_norm": 0.8360636142316585, + "learning_rate": 7.35632183908046e-06, + "loss": 1.6162, + "step": 193 + }, + { + "epoch": 0.37253960633701394, + "grad_norm": 0.6842163000752954, + "learning_rate": 7.394636015325672e-06, + "loss": 1.3775, + "step": 194 + }, + { + "epoch": 0.3744599135861738, + "grad_norm": 0.7405931672623491, + "learning_rate": 7.4329501915708825e-06, + "loss": 1.4155, + "step": 195 + }, + { + "epoch": 0.3763802208353337, + "grad_norm": 0.6724666872249973, + "learning_rate": 7.4712643678160925e-06, + "loss": 1.6041, + "step": 196 + }, + { + "epoch": 0.3783005280844935, + "grad_norm": 0.6600816017352631, + "learning_rate": 7.509578544061303e-06, + "loss": 1.3836, + "step": 197 + }, + { + "epoch": 0.38022083533365336, + "grad_norm": 0.7294649653837092, + "learning_rate": 7.547892720306514e-06, + "loss": 1.3381, + "step": 198 + }, + { + "epoch": 0.38214114258281323, + "grad_norm": 0.6849006152291969, + "learning_rate": 7.586206896551724e-06, + "loss": 1.3448, + "step": 199 + }, + { + "epoch": 0.3840614498319731, + "grad_norm": 0.6413698682933802, + "learning_rate": 7.624521072796936e-06, + "loss": 1.1691, + "step": 200 + }, + { + "epoch": 0.38598175708113297, + "grad_norm": 0.6761946714227771, + "learning_rate": 7.662835249042147e-06, + "loss": 1.2401, + "step": 201 + }, + { + "epoch": 0.38790206433029284, + "grad_norm": 0.6604987264007839, + "learning_rate": 7.701149425287356e-06, + "loss": 1.4297, + "step": 202 + }, + { + "epoch": 0.3898223715794527, + "grad_norm": 0.6655444711020334, + "learning_rate": 7.739463601532567e-06, + "loss": 1.2069, + "step": 203 + }, + { + "epoch": 0.3917426788286126, + "grad_norm": 0.716356344463303, + "learning_rate": 7.77777777777778e-06, + "loss": 1.3456, + "step": 204 + }, + { + "epoch": 0.39366298607777245, + "grad_norm": 0.7371288713977551, + "learning_rate": 7.81609195402299e-06, + "loss": 1.3331, + "step": 205 + }, + { + "epoch": 0.3955832933269323, + "grad_norm": 0.6679678204725035, + "learning_rate": 7.854406130268199e-06, + "loss": 1.4027, + "step": 206 + }, + { + "epoch": 0.3975036005760922, + "grad_norm": 0.6255494545199866, + "learning_rate": 7.89272030651341e-06, + "loss": 1.4204, + "step": 207 + }, + { + "epoch": 0.39942390782525206, + "grad_norm": 0.7776077277070255, + "learning_rate": 7.93103448275862e-06, + "loss": 1.4262, + "step": 208 + }, + { + "epoch": 0.4013442150744119, + "grad_norm": 0.7495735486802019, + "learning_rate": 7.969348659003832e-06, + "loss": 1.3803, + "step": 209 + }, + { + "epoch": 0.4032645223235718, + "grad_norm": 0.7359991087372613, + "learning_rate": 8.007662835249042e-06, + "loss": 1.3407, + "step": 210 + }, + { + "epoch": 0.40518482957273166, + "grad_norm": 0.74502169700267, + "learning_rate": 8.045977011494253e-06, + "loss": 1.2109, + "step": 211 + }, + { + "epoch": 
0.4071051368218915, + "grad_norm": 0.672919730166899, + "learning_rate": 8.084291187739464e-06, + "loss": 1.4416, + "step": 212 + }, + { + "epoch": 0.40902544407105135, + "grad_norm": 0.9039099669663537, + "learning_rate": 8.122605363984675e-06, + "loss": 1.5869, + "step": 213 + }, + { + "epoch": 0.4109457513202112, + "grad_norm": 0.609809507238103, + "learning_rate": 8.160919540229886e-06, + "loss": 1.392, + "step": 214 + }, + { + "epoch": 0.4128660585693711, + "grad_norm": 0.7209905089139385, + "learning_rate": 8.199233716475097e-06, + "loss": 1.5286, + "step": 215 + }, + { + "epoch": 0.41478636581853096, + "grad_norm": 0.7641098257228572, + "learning_rate": 8.237547892720307e-06, + "loss": 1.4556, + "step": 216 + }, + { + "epoch": 0.4167066730676908, + "grad_norm": 0.6671329519012665, + "learning_rate": 8.275862068965518e-06, + "loss": 1.4011, + "step": 217 + }, + { + "epoch": 0.4186269803168507, + "grad_norm": 0.7571002797488336, + "learning_rate": 8.31417624521073e-06, + "loss": 1.3907, + "step": 218 + }, + { + "epoch": 0.42054728756601056, + "grad_norm": 0.6765159050714699, + "learning_rate": 8.35249042145594e-06, + "loss": 1.411, + "step": 219 + }, + { + "epoch": 0.42246759481517043, + "grad_norm": 0.7631812758174655, + "learning_rate": 8.390804597701149e-06, + "loss": 1.4568, + "step": 220 + }, + { + "epoch": 0.4243879020643303, + "grad_norm": 0.819638920429698, + "learning_rate": 8.429118773946362e-06, + "loss": 1.2607, + "step": 221 + }, + { + "epoch": 0.42630820931349017, + "grad_norm": 0.6856567916211197, + "learning_rate": 8.467432950191573e-06, + "loss": 1.3665, + "step": 222 + }, + { + "epoch": 0.42822851656265004, + "grad_norm": 0.756851173923122, + "learning_rate": 8.505747126436782e-06, + "loss": 1.4053, + "step": 223 + }, + { + "epoch": 0.4301488238118099, + "grad_norm": 0.7234111087737882, + "learning_rate": 8.544061302681992e-06, + "loss": 1.2986, + "step": 224 + }, + { + "epoch": 0.4320691310609698, + "grad_norm": 0.7822272702322975, + "learning_rate": 8.582375478927203e-06, + "loss": 1.4298, + "step": 225 + }, + { + "epoch": 0.4339894383101296, + "grad_norm": 0.6810248686630753, + "learning_rate": 8.620689655172414e-06, + "loss": 1.3399, + "step": 226 + }, + { + "epoch": 0.43590974555928946, + "grad_norm": 0.8051128970700839, + "learning_rate": 8.659003831417625e-06, + "loss": 1.5267, + "step": 227 + }, + { + "epoch": 0.43783005280844933, + "grad_norm": 0.732357170450308, + "learning_rate": 8.697318007662836e-06, + "loss": 1.4766, + "step": 228 + }, + { + "epoch": 0.4397503600576092, + "grad_norm": 0.8096174835706853, + "learning_rate": 8.735632183908047e-06, + "loss": 1.5061, + "step": 229 + }, + { + "epoch": 0.44167066730676907, + "grad_norm": 0.7498363788557091, + "learning_rate": 8.773946360153257e-06, + "loss": 1.5328, + "step": 230 + }, + { + "epoch": 0.44359097455592894, + "grad_norm": 0.6639308793432624, + "learning_rate": 8.812260536398468e-06, + "loss": 1.5014, + "step": 231 + }, + { + "epoch": 0.4455112818050888, + "grad_norm": 0.7955581619122697, + "learning_rate": 8.85057471264368e-06, + "loss": 1.4619, + "step": 232 + }, + { + "epoch": 0.4474315890542487, + "grad_norm": 0.6665415125116451, + "learning_rate": 8.888888888888888e-06, + "loss": 1.4401, + "step": 233 + }, + { + "epoch": 0.44935189630340855, + "grad_norm": 0.7497106975893845, + "learning_rate": 8.9272030651341e-06, + "loss": 1.4444, + "step": 234 + }, + { + "epoch": 0.4512722035525684, + "grad_norm": 0.7655250611774667, + "learning_rate": 8.965517241379312e-06, + "loss": 1.4183, + "step": 235 
+ }, + { + "epoch": 0.4531925108017283, + "grad_norm": 0.7069725743129521, + "learning_rate": 9.003831417624522e-06, + "loss": 1.4695, + "step": 236 + }, + { + "epoch": 0.45511281805088816, + "grad_norm": 0.7033810098540295, + "learning_rate": 9.042145593869732e-06, + "loss": 1.432, + "step": 237 + }, + { + "epoch": 0.457033125300048, + "grad_norm": 0.7657511117560273, + "learning_rate": 9.080459770114942e-06, + "loss": 1.3106, + "step": 238 + }, + { + "epoch": 0.4589534325492079, + "grad_norm": 0.6734934664683107, + "learning_rate": 9.118773946360155e-06, + "loss": 1.2657, + "step": 239 + }, + { + "epoch": 0.46087373979836777, + "grad_norm": 0.6793414583903499, + "learning_rate": 9.157088122605364e-06, + "loss": 1.2823, + "step": 240 + }, + { + "epoch": 0.4627940470475276, + "grad_norm": 0.6868551209136969, + "learning_rate": 9.195402298850575e-06, + "loss": 1.1981, + "step": 241 + }, + { + "epoch": 0.46471435429668745, + "grad_norm": 0.7999498220323118, + "learning_rate": 9.233716475095786e-06, + "loss": 1.3222, + "step": 242 + }, + { + "epoch": 0.4666346615458473, + "grad_norm": 0.6765218175071678, + "learning_rate": 9.272030651340997e-06, + "loss": 1.3159, + "step": 243 + }, + { + "epoch": 0.4685549687950072, + "grad_norm": 0.7633174577339077, + "learning_rate": 9.310344827586207e-06, + "loss": 1.4493, + "step": 244 + }, + { + "epoch": 0.47047527604416706, + "grad_norm": 0.7434731700432043, + "learning_rate": 9.348659003831418e-06, + "loss": 1.3771, + "step": 245 + }, + { + "epoch": 0.4723955832933269, + "grad_norm": 0.7887594763056447, + "learning_rate": 9.386973180076629e-06, + "loss": 1.2978, + "step": 246 + }, + { + "epoch": 0.4743158905424868, + "grad_norm": 0.6816410513843812, + "learning_rate": 9.42528735632184e-06, + "loss": 1.3221, + "step": 247 + }, + { + "epoch": 0.47623619779164666, + "grad_norm": 0.6726044542699641, + "learning_rate": 9.46360153256705e-06, + "loss": 1.3954, + "step": 248 + }, + { + "epoch": 0.47815650504080653, + "grad_norm": 0.8209401718350966, + "learning_rate": 9.501915708812262e-06, + "loss": 1.2964, + "step": 249 + }, + { + "epoch": 0.4800768122899664, + "grad_norm": 0.7150467022402414, + "learning_rate": 9.54022988505747e-06, + "loss": 1.2462, + "step": 250 + }, + { + "epoch": 0.4819971195391263, + "grad_norm": 0.7473286809494867, + "learning_rate": 9.578544061302683e-06, + "loss": 1.3987, + "step": 251 + }, + { + "epoch": 0.48391742678828614, + "grad_norm": 0.8919512518954593, + "learning_rate": 9.616858237547894e-06, + "loss": 1.3271, + "step": 252 + }, + { + "epoch": 0.485837734037446, + "grad_norm": 0.7235436092130889, + "learning_rate": 9.655172413793105e-06, + "loss": 1.4006, + "step": 253 + }, + { + "epoch": 0.4877580412866059, + "grad_norm": 0.7496985395380509, + "learning_rate": 9.693486590038314e-06, + "loss": 1.3485, + "step": 254 + }, + { + "epoch": 0.4896783485357657, + "grad_norm": 0.7558104853132458, + "learning_rate": 9.731800766283525e-06, + "loss": 1.4354, + "step": 255 + }, + { + "epoch": 0.49159865578492556, + "grad_norm": 0.8026084646949407, + "learning_rate": 9.770114942528738e-06, + "loss": 1.3458, + "step": 256 + }, + { + "epoch": 0.49351896303408543, + "grad_norm": 0.6871351528584623, + "learning_rate": 9.808429118773947e-06, + "loss": 1.2849, + "step": 257 + }, + { + "epoch": 0.4954392702832453, + "grad_norm": 0.6733156882936058, + "learning_rate": 9.846743295019157e-06, + "loss": 1.3416, + "step": 258 + }, + { + "epoch": 0.4973595775324052, + "grad_norm": 0.8040078483269378, + "learning_rate": 9.885057471264368e-06, + 
"loss": 1.3933, + "step": 259 + }, + { + "epoch": 0.49927988478156504, + "grad_norm": 0.7019620968242437, + "learning_rate": 9.923371647509579e-06, + "loss": 1.4092, + "step": 260 + }, + { + "epoch": 0.501200192030725, + "grad_norm": 0.7993119421084648, + "learning_rate": 9.96168582375479e-06, + "loss": 1.4496, + "step": 261 + }, + { + "epoch": 0.5031204992798848, + "grad_norm": 0.7500420731214819, + "learning_rate": 1e-05, + "loss": 1.275, + "step": 262 + }, + { + "epoch": 0.5050408065290446, + "grad_norm": 0.7096092331830312, + "learning_rate": 9.999995509192137e-06, + "loss": 1.4075, + "step": 263 + }, + { + "epoch": 0.5069611137782045, + "grad_norm": 0.6880277184241935, + "learning_rate": 9.999982036776617e-06, + "loss": 1.3853, + "step": 264 + }, + { + "epoch": 0.5088814210273643, + "grad_norm": 0.6887794428820988, + "learning_rate": 9.999959582777638e-06, + "loss": 1.2526, + "step": 265 + }, + { + "epoch": 0.5108017282765243, + "grad_norm": 0.6962486538004352, + "learning_rate": 9.999928147235536e-06, + "loss": 1.385, + "step": 266 + }, + { + "epoch": 0.5127220355256841, + "grad_norm": 0.7314717827625464, + "learning_rate": 9.99988773020678e-06, + "loss": 1.5364, + "step": 267 + }, + { + "epoch": 0.514642342774844, + "grad_norm": 0.6755436676174023, + "learning_rate": 9.99983833176397e-06, + "loss": 1.1625, + "step": 268 + }, + { + "epoch": 0.5165626500240038, + "grad_norm": 0.6799257698916303, + "learning_rate": 9.999779951995845e-06, + "loss": 1.3743, + "step": 269 + }, + { + "epoch": 0.5184829572731637, + "grad_norm": 0.6967760972692357, + "learning_rate": 9.99971259100727e-06, + "loss": 1.4619, + "step": 270 + }, + { + "epoch": 0.5204032645223235, + "grad_norm": 0.6876004824004736, + "learning_rate": 9.99963624891925e-06, + "loss": 1.2302, + "step": 271 + }, + { + "epoch": 0.5223235717714835, + "grad_norm": 0.741525025016876, + "learning_rate": 9.999550925868919e-06, + "loss": 1.4944, + "step": 272 + }, + { + "epoch": 0.5242438790206433, + "grad_norm": 0.7044462970993084, + "learning_rate": 9.999456622009545e-06, + "loss": 1.3233, + "step": 273 + }, + { + "epoch": 0.5261641862698032, + "grad_norm": 0.7147463675731596, + "learning_rate": 9.999353337510526e-06, + "loss": 1.4728, + "step": 274 + }, + { + "epoch": 0.528084493518963, + "grad_norm": 0.7580689885973482, + "learning_rate": 9.9992410725574e-06, + "loss": 1.3894, + "step": 275 + }, + { + "epoch": 0.530004800768123, + "grad_norm": 0.627159916011877, + "learning_rate": 9.999119827351824e-06, + "loss": 1.1499, + "step": 276 + }, + { + "epoch": 0.5319251080172828, + "grad_norm": 0.7347721430138173, + "learning_rate": 9.998989602111599e-06, + "loss": 1.3863, + "step": 277 + }, + { + "epoch": 0.5338454152664426, + "grad_norm": 0.7561542947388183, + "learning_rate": 9.99885039707065e-06, + "loss": 1.4094, + "step": 278 + }, + { + "epoch": 0.5357657225156025, + "grad_norm": 0.7257428578657052, + "learning_rate": 9.998702212479031e-06, + "loss": 1.3558, + "step": 279 + }, + { + "epoch": 0.5376860297647623, + "grad_norm": 0.7156710718415759, + "learning_rate": 9.998545048602938e-06, + "loss": 1.3932, + "step": 280 + }, + { + "epoch": 0.5396063370139222, + "grad_norm": 0.8324536861856755, + "learning_rate": 9.998378905724677e-06, + "loss": 1.4631, + "step": 281 + }, + { + "epoch": 0.5415266442630821, + "grad_norm": 0.769865333129953, + "learning_rate": 9.998203784142701e-06, + "loss": 1.4778, + "step": 282 + }, + { + "epoch": 0.543446951512242, + "grad_norm": 0.7558894226395197, + "learning_rate": 9.998019684171585e-06, + "loss": 
1.3401, + "step": 283 + }, + { + "epoch": 0.5453672587614018, + "grad_norm": 0.745453902467428, + "learning_rate": 9.997826606142031e-06, + "loss": 1.376, + "step": 284 + }, + { + "epoch": 0.5472875660105617, + "grad_norm": 0.8525307677948906, + "learning_rate": 9.997624550400869e-06, + "loss": 1.3269, + "step": 285 + }, + { + "epoch": 0.5492078732597215, + "grad_norm": 0.8993675673573773, + "learning_rate": 9.997413517311055e-06, + "loss": 1.3973, + "step": 286 + }, + { + "epoch": 0.5511281805088815, + "grad_norm": 0.7637767723846134, + "learning_rate": 9.997193507251676e-06, + "loss": 1.4107, + "step": 287 + }, + { + "epoch": 0.5530484877580413, + "grad_norm": 0.7334145684503066, + "learning_rate": 9.996964520617938e-06, + "loss": 1.2548, + "step": 288 + }, + { + "epoch": 0.5549687950072012, + "grad_norm": 0.8286763467838726, + "learning_rate": 9.996726557821177e-06, + "loss": 1.3915, + "step": 289 + }, + { + "epoch": 0.556889102256361, + "grad_norm": 0.786223417485439, + "learning_rate": 9.996479619288853e-06, + "loss": 1.2113, + "step": 290 + }, + { + "epoch": 0.5588094095055209, + "grad_norm": 0.6629161249458739, + "learning_rate": 9.996223705464542e-06, + "loss": 1.383, + "step": 291 + }, + { + "epoch": 0.5607297167546808, + "grad_norm": 0.7763568743151492, + "learning_rate": 9.995958816807951e-06, + "loss": 1.511, + "step": 292 + }, + { + "epoch": 0.5626500240038406, + "grad_norm": 0.8706771128343518, + "learning_rate": 9.995684953794905e-06, + "loss": 1.301, + "step": 293 + }, + { + "epoch": 0.5645703312530005, + "grad_norm": 0.7186979734065922, + "learning_rate": 9.995402116917353e-06, + "loss": 1.3137, + "step": 294 + }, + { + "epoch": 0.5664906385021603, + "grad_norm": 0.7197477949946243, + "learning_rate": 9.995110306683358e-06, + "loss": 1.3924, + "step": 295 + }, + { + "epoch": 0.5684109457513202, + "grad_norm": 0.7990094094001562, + "learning_rate": 9.994809523617109e-06, + "loss": 1.3595, + "step": 296 + }, + { + "epoch": 0.57033125300048, + "grad_norm": 0.7548256910113419, + "learning_rate": 9.994499768258905e-06, + "loss": 1.2627, + "step": 297 + }, + { + "epoch": 0.57225156024964, + "grad_norm": 0.6080847660246027, + "learning_rate": 9.994181041165169e-06, + "loss": 1.233, + "step": 298 + }, + { + "epoch": 0.5741718674987998, + "grad_norm": 0.7522747291038051, + "learning_rate": 9.99385334290844e-06, + "loss": 1.4473, + "step": 299 + }, + { + "epoch": 0.5760921747479597, + "grad_norm": 0.693071662167599, + "learning_rate": 9.993516674077367e-06, + "loss": 1.3112, + "step": 300 + }, + { + "epoch": 0.5780124819971195, + "grad_norm": 0.7289575683988432, + "learning_rate": 9.993171035276717e-06, + "loss": 1.4447, + "step": 301 + }, + { + "epoch": 0.5799327892462794, + "grad_norm": 0.7219332107019164, + "learning_rate": 9.992816427127367e-06, + "loss": 1.3059, + "step": 302 + }, + { + "epoch": 0.5818530964954393, + "grad_norm": 0.7884436507160677, + "learning_rate": 9.992452850266313e-06, + "loss": 1.4828, + "step": 303 + }, + { + "epoch": 0.5837734037445992, + "grad_norm": 0.6835750513473184, + "learning_rate": 9.992080305346652e-06, + "loss": 1.3351, + "step": 304 + }, + { + "epoch": 0.585693710993759, + "grad_norm": 0.7455682427352298, + "learning_rate": 9.991698793037596e-06, + "loss": 1.3393, + "step": 305 + }, + { + "epoch": 0.5876140182429188, + "grad_norm": 0.6344234674402865, + "learning_rate": 9.991308314024466e-06, + "loss": 1.1186, + "step": 306 + }, + { + "epoch": 0.5895343254920787, + "grad_norm": 0.6463680302231726, + "learning_rate": 9.990908869008685e-06, + 
"loss": 1.3536, + "step": 307 + }, + { + "epoch": 0.5914546327412386, + "grad_norm": 0.7763640180054763, + "learning_rate": 9.99050045870779e-06, + "loss": 1.4034, + "step": 308 + }, + { + "epoch": 0.5933749399903985, + "grad_norm": 0.7729679972786335, + "learning_rate": 9.990083083855413e-06, + "loss": 1.518, + "step": 309 + }, + { + "epoch": 0.5952952472395583, + "grad_norm": 0.7293910429882318, + "learning_rate": 9.9896567452013e-06, + "loss": 1.4168, + "step": 310 + }, + { + "epoch": 0.5972155544887182, + "grad_norm": 0.6938721572185663, + "learning_rate": 9.989221443511286e-06, + "loss": 1.5813, + "step": 311 + }, + { + "epoch": 0.599135861737878, + "grad_norm": 0.6823348506968784, + "learning_rate": 9.98877717956732e-06, + "loss": 1.396, + "step": 312 + }, + { + "epoch": 0.601056168987038, + "grad_norm": 0.6470598419144085, + "learning_rate": 9.988323954167438e-06, + "loss": 1.3138, + "step": 313 + }, + { + "epoch": 0.6029764762361978, + "grad_norm": 0.6955870872672735, + "learning_rate": 9.987861768125783e-06, + "loss": 1.4043, + "step": 314 + }, + { + "epoch": 0.6048967834853577, + "grad_norm": 0.7788214111440995, + "learning_rate": 9.98739062227259e-06, + "loss": 1.5305, + "step": 315 + }, + { + "epoch": 0.6068170907345175, + "grad_norm": 0.6598071275711943, + "learning_rate": 9.986910517454188e-06, + "loss": 1.185, + "step": 316 + }, + { + "epoch": 0.6087373979836774, + "grad_norm": 0.7478469808999688, + "learning_rate": 9.986421454533001e-06, + "loss": 1.2875, + "step": 317 + }, + { + "epoch": 0.6106577052328372, + "grad_norm": 0.7102386979345604, + "learning_rate": 9.985923434387545e-06, + "loss": 1.2623, + "step": 318 + }, + { + "epoch": 0.6125780124819972, + "grad_norm": 0.8159684281974479, + "learning_rate": 9.985416457912423e-06, + "loss": 1.3859, + "step": 319 + }, + { + "epoch": 0.614498319731157, + "grad_norm": 0.7359831696017346, + "learning_rate": 9.984900526018331e-06, + "loss": 1.3127, + "step": 320 + }, + { + "epoch": 0.6164186269803168, + "grad_norm": 0.7406050107164467, + "learning_rate": 9.984375639632047e-06, + "loss": 1.3526, + "step": 321 + }, + { + "epoch": 0.6183389342294767, + "grad_norm": 0.7171563092825125, + "learning_rate": 9.98384179969644e-06, + "loss": 1.3718, + "step": 322 + }, + { + "epoch": 0.6202592414786365, + "grad_norm": 0.7444992833186831, + "learning_rate": 9.983299007170454e-06, + "loss": 1.2605, + "step": 323 + }, + { + "epoch": 0.6221795487277965, + "grad_norm": 0.6718703991955216, + "learning_rate": 9.982747263029123e-06, + "loss": 1.2696, + "step": 324 + }, + { + "epoch": 0.6240998559769563, + "grad_norm": 0.6953480856589247, + "learning_rate": 9.982186568263558e-06, + "loss": 1.402, + "step": 325 + }, + { + "epoch": 0.6260201632261162, + "grad_norm": 0.6814410132323387, + "learning_rate": 9.981616923880948e-06, + "loss": 1.357, + "step": 326 + }, + { + "epoch": 0.627940470475276, + "grad_norm": 0.6789399695802566, + "learning_rate": 9.981038330904556e-06, + "loss": 1.3563, + "step": 327 + }, + { + "epoch": 0.6298607777244359, + "grad_norm": 0.7084278817620212, + "learning_rate": 9.980450790373724e-06, + "loss": 1.3983, + "step": 328 + }, + { + "epoch": 0.6317810849735958, + "grad_norm": 0.7561797459599714, + "learning_rate": 9.979854303343866e-06, + "loss": 1.3998, + "step": 329 + }, + { + "epoch": 0.6337013922227557, + "grad_norm": 0.7063013474407709, + "learning_rate": 9.979248870886463e-06, + "loss": 1.4128, + "step": 330 + }, + { + "epoch": 0.6356216994719155, + "grad_norm": 0.7285839877290075, + "learning_rate": 
9.978634494089066e-06, + "loss": 1.5026, + "step": 331 + }, + { + "epoch": 0.6375420067210754, + "grad_norm": 0.7631063764422019, + "learning_rate": 9.9780111740553e-06, + "loss": 1.3353, + "step": 332 + }, + { + "epoch": 0.6394623139702352, + "grad_norm": 0.8022733412976211, + "learning_rate": 9.977378911904843e-06, + "loss": 1.3017, + "step": 333 + }, + { + "epoch": 0.6413826212193952, + "grad_norm": 0.7397584519382538, + "learning_rate": 9.976737708773445e-06, + "loss": 1.4274, + "step": 334 + }, + { + "epoch": 0.643302928468555, + "grad_norm": 0.7498254808267637, + "learning_rate": 9.976087565812913e-06, + "loss": 1.2821, + "step": 335 + }, + { + "epoch": 0.6452232357177148, + "grad_norm": 0.8440233129581642, + "learning_rate": 9.975428484191117e-06, + "loss": 1.507, + "step": 336 + }, + { + "epoch": 0.6471435429668747, + "grad_norm": 0.7140420729824992, + "learning_rate": 9.974760465091975e-06, + "loss": 1.3541, + "step": 337 + }, + { + "epoch": 0.6490638502160345, + "grad_norm": 0.7935959679049022, + "learning_rate": 9.974083509715471e-06, + "loss": 1.3704, + "step": 338 + }, + { + "epoch": 0.6509841574651944, + "grad_norm": 0.8128531878052235, + "learning_rate": 9.973397619277631e-06, + "loss": 1.3941, + "step": 339 + }, + { + "epoch": 0.6529044647143543, + "grad_norm": 0.7087809281812139, + "learning_rate": 9.972702795010539e-06, + "loss": 1.3024, + "step": 340 + }, + { + "epoch": 0.6548247719635142, + "grad_norm": 0.8779292688681924, + "learning_rate": 9.971999038162322e-06, + "loss": 1.275, + "step": 341 + }, + { + "epoch": 0.656745079212674, + "grad_norm": 0.8321554450658822, + "learning_rate": 9.971286349997155e-06, + "loss": 1.4001, + "step": 342 + }, + { + "epoch": 0.6586653864618339, + "grad_norm": 0.6785835670278665, + "learning_rate": 9.970564731795259e-06, + "loss": 1.3563, + "step": 343 + }, + { + "epoch": 0.6605856937109937, + "grad_norm": 0.8154880274231641, + "learning_rate": 9.96983418485289e-06, + "loss": 1.2015, + "step": 344 + }, + { + "epoch": 0.6625060009601537, + "grad_norm": 0.7877809686842095, + "learning_rate": 9.969094710482345e-06, + "loss": 1.3176, + "step": 345 + }, + { + "epoch": 0.6644263082093135, + "grad_norm": 0.737869545031943, + "learning_rate": 9.968346310011965e-06, + "loss": 1.2681, + "step": 346 + }, + { + "epoch": 0.6663466154584734, + "grad_norm": 0.8367016706595558, + "learning_rate": 9.967588984786113e-06, + "loss": 1.3146, + "step": 347 + }, + { + "epoch": 0.6682669227076332, + "grad_norm": 0.7157484065111132, + "learning_rate": 9.966822736165194e-06, + "loss": 1.2279, + "step": 348 + }, + { + "epoch": 0.6701872299567931, + "grad_norm": 0.8340115611171157, + "learning_rate": 9.966047565525636e-06, + "loss": 1.346, + "step": 349 + }, + { + "epoch": 0.672107537205953, + "grad_norm": 0.7964501684625079, + "learning_rate": 9.965263474259896e-06, + "loss": 1.2604, + "step": 350 + }, + { + "epoch": 0.6740278444551128, + "grad_norm": 0.7644952492186728, + "learning_rate": 9.964470463776457e-06, + "loss": 1.2808, + "step": 351 + }, + { + "epoch": 0.6759481517042727, + "grad_norm": 0.715875104396104, + "learning_rate": 9.96366853549982e-06, + "loss": 1.3779, + "step": 352 + }, + { + "epoch": 0.6778684589534325, + "grad_norm": 0.6658897567313692, + "learning_rate": 9.962857690870507e-06, + "loss": 1.3005, + "step": 353 + }, + { + "epoch": 0.6797887662025924, + "grad_norm": 0.7869576623216241, + "learning_rate": 9.962037931345058e-06, + "loss": 1.3772, + "step": 354 + }, + { + "epoch": 0.6817090734517522, + "grad_norm": 0.9271972422960602, + 
"learning_rate": 9.96120925839603e-06, + "loss": 1.346, + "step": 355 + }, + { + "epoch": 0.6836293807009122, + "grad_norm": 0.7660166493856924, + "learning_rate": 9.96037167351198e-06, + "loss": 1.2745, + "step": 356 + }, + { + "epoch": 0.685549687950072, + "grad_norm": 0.6278698318719681, + "learning_rate": 9.959525178197484e-06, + "loss": 1.1849, + "step": 357 + }, + { + "epoch": 0.6874699951992319, + "grad_norm": 0.8957448047534449, + "learning_rate": 9.958669773973124e-06, + "loss": 1.47, + "step": 358 + }, + { + "epoch": 0.6893903024483917, + "grad_norm": 0.7893290798004599, + "learning_rate": 9.95780546237548e-06, + "loss": 1.4411, + "step": 359 + }, + { + "epoch": 0.6913106096975516, + "grad_norm": 0.6674442252477479, + "learning_rate": 9.956932244957135e-06, + "loss": 1.3468, + "step": 360 + }, + { + "epoch": 0.6932309169467115, + "grad_norm": 0.8637622685527974, + "learning_rate": 9.95605012328667e-06, + "loss": 1.4951, + "step": 361 + }, + { + "epoch": 0.6951512241958714, + "grad_norm": 0.7032170307148827, + "learning_rate": 9.95515909894866e-06, + "loss": 1.2168, + "step": 362 + }, + { + "epoch": 0.6970715314450312, + "grad_norm": 0.7604809716338964, + "learning_rate": 9.954259173543671e-06, + "loss": 1.3405, + "step": 363 + }, + { + "epoch": 0.6989918386941911, + "grad_norm": 0.7356596357988364, + "learning_rate": 9.953350348688264e-06, + "loss": 1.4851, + "step": 364 + }, + { + "epoch": 0.7009121459433509, + "grad_norm": 0.7106099387583814, + "learning_rate": 9.952432626014979e-06, + "loss": 1.4105, + "step": 365 + }, + { + "epoch": 0.7028324531925108, + "grad_norm": 0.7680103674421779, + "learning_rate": 9.951506007172344e-06, + "loss": 1.3767, + "step": 366 + }, + { + "epoch": 0.7047527604416707, + "grad_norm": 0.7045547217675817, + "learning_rate": 9.950570493824864e-06, + "loss": 1.3534, + "step": 367 + }, + { + "epoch": 0.7066730676908305, + "grad_norm": 0.8618867023288301, + "learning_rate": 9.949626087653026e-06, + "loss": 1.5932, + "step": 368 + }, + { + "epoch": 0.7085933749399904, + "grad_norm": 0.7514120877501722, + "learning_rate": 9.948672790353287e-06, + "loss": 1.4226, + "step": 369 + }, + { + "epoch": 0.7105136821891502, + "grad_norm": 0.7707655494026907, + "learning_rate": 9.947710603638078e-06, + "loss": 1.3086, + "step": 370 + }, + { + "epoch": 0.7124339894383102, + "grad_norm": 0.7353314917000675, + "learning_rate": 9.946739529235797e-06, + "loss": 1.3498, + "step": 371 + }, + { + "epoch": 0.71435429668747, + "grad_norm": 0.7677899312066072, + "learning_rate": 9.945759568890804e-06, + "loss": 1.337, + "step": 372 + }, + { + "epoch": 0.7162746039366299, + "grad_norm": 0.7323170602281932, + "learning_rate": 9.944770724363428e-06, + "loss": 1.2262, + "step": 373 + }, + { + "epoch": 0.7181949111857897, + "grad_norm": 0.7030503301748048, + "learning_rate": 9.943772997429955e-06, + "loss": 1.2604, + "step": 374 + }, + { + "epoch": 0.7201152184349496, + "grad_norm": 0.8803804845996765, + "learning_rate": 9.942766389882621e-06, + "loss": 1.3465, + "step": 375 + }, + { + "epoch": 0.7220355256841094, + "grad_norm": 0.765754153505594, + "learning_rate": 9.94175090352962e-06, + "loss": 1.4785, + "step": 376 + }, + { + "epoch": 0.7239558329332694, + "grad_norm": 0.7412100725496786, + "learning_rate": 9.940726540195093e-06, + "loss": 1.3886, + "step": 377 + }, + { + "epoch": 0.7258761401824292, + "grad_norm": 0.7352092180670398, + "learning_rate": 9.939693301719131e-06, + "loss": 1.3787, + "step": 378 + }, + { + "epoch": 0.727796447431589, + "grad_norm": 
0.7081810984489154, + "learning_rate": 9.93865118995776e-06, + "loss": 1.2855, + "step": 379 + }, + { + "epoch": 0.7297167546807489, + "grad_norm": 0.721692280601312, + "learning_rate": 9.937600206782951e-06, + "loss": 1.2581, + "step": 380 + }, + { + "epoch": 0.7316370619299087, + "grad_norm": 0.7219716174107607, + "learning_rate": 9.93654035408261e-06, + "loss": 1.3989, + "step": 381 + }, + { + "epoch": 0.7335573691790687, + "grad_norm": 0.808844014227327, + "learning_rate": 9.935471633760572e-06, + "loss": 1.489, + "step": 382 + }, + { + "epoch": 0.7354776764282285, + "grad_norm": 0.6931394591191726, + "learning_rate": 9.934394047736608e-06, + "loss": 1.3596, + "step": 383 + }, + { + "epoch": 0.7373979836773884, + "grad_norm": 0.7255320444997646, + "learning_rate": 9.93330759794641e-06, + "loss": 1.2593, + "step": 384 + }, + { + "epoch": 0.7393182909265482, + "grad_norm": 0.6469865776133421, + "learning_rate": 9.932212286341591e-06, + "loss": 1.4305, + "step": 385 + }, + { + "epoch": 0.7412385981757081, + "grad_norm": 0.7276641692049547, + "learning_rate": 9.931108114889685e-06, + "loss": 1.531, + "step": 386 + }, + { + "epoch": 0.743158905424868, + "grad_norm": 0.7064363862608019, + "learning_rate": 9.929995085574142e-06, + "loss": 1.3905, + "step": 387 + }, + { + "epoch": 0.7450792126740279, + "grad_norm": 0.7331138015593877, + "learning_rate": 9.928873200394323e-06, + "loss": 1.3649, + "step": 388 + }, + { + "epoch": 0.7469995199231877, + "grad_norm": 0.7324112634125343, + "learning_rate": 9.927742461365493e-06, + "loss": 1.4049, + "step": 389 + }, + { + "epoch": 0.7489198271723476, + "grad_norm": 0.7448582260656762, + "learning_rate": 9.926602870518826e-06, + "loss": 1.3451, + "step": 390 + }, + { + "epoch": 0.7508401344215074, + "grad_norm": 0.7290925508892867, + "learning_rate": 9.925454429901397e-06, + "loss": 1.265, + "step": 391 + }, + { + "epoch": 0.7527604416706674, + "grad_norm": 0.8652107575311744, + "learning_rate": 9.924297141576176e-06, + "loss": 1.2601, + "step": 392 + }, + { + "epoch": 0.7546807489198272, + "grad_norm": 0.8112589543786329, + "learning_rate": 9.923131007622027e-06, + "loss": 1.3949, + "step": 393 + }, + { + "epoch": 0.756601056168987, + "grad_norm": 0.7564370059278668, + "learning_rate": 9.9219560301337e-06, + "loss": 1.5582, + "step": 394 + }, + { + "epoch": 0.7585213634181469, + "grad_norm": 0.7217323314506744, + "learning_rate": 9.920772211221841e-06, + "loss": 1.3385, + "step": 395 + }, + { + "epoch": 0.7604416706673067, + "grad_norm": 0.7276996906484348, + "learning_rate": 9.919579553012964e-06, + "loss": 1.3778, + "step": 396 + }, + { + "epoch": 0.7623619779164666, + "grad_norm": 0.7823149650395126, + "learning_rate": 9.918378057649474e-06, + "loss": 1.3767, + "step": 397 + }, + { + "epoch": 0.7642822851656265, + "grad_norm": 0.6630175095699308, + "learning_rate": 9.917167727289641e-06, + "loss": 1.4844, + "step": 398 + }, + { + "epoch": 0.7662025924147864, + "grad_norm": 0.7344567983746931, + "learning_rate": 9.915948564107611e-06, + "loss": 1.3379, + "step": 399 + }, + { + "epoch": 0.7681228996639462, + "grad_norm": 0.7466071399468908, + "learning_rate": 9.914720570293397e-06, + "loss": 1.3972, + "step": 400 + }, + { + "epoch": 0.7700432069131061, + "grad_norm": 0.6949526386517407, + "learning_rate": 9.913483748052871e-06, + "loss": 1.4014, + "step": 401 + }, + { + "epoch": 0.7719635141622659, + "grad_norm": 0.6613334975361209, + "learning_rate": 9.912238099607763e-06, + "loss": 1.2069, + "step": 402 + }, + { + "epoch": 0.7738838214114259, + 
"grad_norm": 0.6632714829710001, + "learning_rate": 9.910983627195665e-06, + "loss": 1.2427, + "step": 403 + }, + { + "epoch": 0.7758041286605857, + "grad_norm": 0.6899922310091848, + "learning_rate": 9.90972033307001e-06, + "loss": 1.4041, + "step": 404 + }, + { + "epoch": 0.7777244359097456, + "grad_norm": 0.6259530173512385, + "learning_rate": 9.908448219500087e-06, + "loss": 1.0889, + "step": 405 + }, + { + "epoch": 0.7796447431589054, + "grad_norm": 0.7856357851084043, + "learning_rate": 9.90716728877102e-06, + "loss": 1.4446, + "step": 406 + }, + { + "epoch": 0.7815650504080653, + "grad_norm": 0.6391414313005859, + "learning_rate": 9.905877543183776e-06, + "loss": 1.3569, + "step": 407 + }, + { + "epoch": 0.7834853576572252, + "grad_norm": 0.7168868905941123, + "learning_rate": 9.904578985055151e-06, + "loss": 1.3422, + "step": 408 + }, + { + "epoch": 0.785405664906385, + "grad_norm": 0.7244557046923163, + "learning_rate": 9.903271616717782e-06, + "loss": 1.4439, + "step": 409 + }, + { + "epoch": 0.7873259721555449, + "grad_norm": 0.7135679454347851, + "learning_rate": 9.901955440520121e-06, + "loss": 1.417, + "step": 410 + }, + { + "epoch": 0.7892462794047047, + "grad_norm": 0.6975305523738247, + "learning_rate": 9.900630458826443e-06, + "loss": 1.2732, + "step": 411 + }, + { + "epoch": 0.7911665866538646, + "grad_norm": 0.6844791536520864, + "learning_rate": 9.89929667401685e-06, + "loss": 1.1994, + "step": 412 + }, + { + "epoch": 0.7930868939030244, + "grad_norm": 0.6995208409953875, + "learning_rate": 9.897954088487245e-06, + "loss": 1.44, + "step": 413 + }, + { + "epoch": 0.7950072011521844, + "grad_norm": 0.6664160171541003, + "learning_rate": 9.896602704649348e-06, + "loss": 1.3604, + "step": 414 + }, + { + "epoch": 0.7969275084013442, + "grad_norm": 0.6554390013099766, + "learning_rate": 9.89524252493068e-06, + "loss": 1.3002, + "step": 415 + }, + { + "epoch": 0.7988478156505041, + "grad_norm": 0.7714848495681179, + "learning_rate": 9.893873551774561e-06, + "loss": 1.4559, + "step": 416 + }, + { + "epoch": 0.8007681228996639, + "grad_norm": 0.684531285979124, + "learning_rate": 9.892495787640117e-06, + "loss": 1.4116, + "step": 417 + }, + { + "epoch": 0.8026884301488239, + "grad_norm": 0.625351185162249, + "learning_rate": 9.891109235002248e-06, + "loss": 1.2968, + "step": 418 + }, + { + "epoch": 0.8046087373979837, + "grad_norm": 0.6072425157093212, + "learning_rate": 9.889713896351658e-06, + "loss": 1.2834, + "step": 419 + }, + { + "epoch": 0.8065290446471436, + "grad_norm": 0.7557266979543664, + "learning_rate": 9.888309774194822e-06, + "loss": 1.2581, + "step": 420 + }, + { + "epoch": 0.8084493518963034, + "grad_norm": 0.6948893024278046, + "learning_rate": 9.886896871053996e-06, + "loss": 1.3472, + "step": 421 + }, + { + "epoch": 0.8103696591454633, + "grad_norm": 0.782279217584078, + "learning_rate": 9.885475189467217e-06, + "loss": 1.3546, + "step": 422 + }, + { + "epoch": 0.8122899663946231, + "grad_norm": 0.7188760215338773, + "learning_rate": 9.884044731988278e-06, + "loss": 1.3683, + "step": 423 + }, + { + "epoch": 0.814210273643783, + "grad_norm": 0.7215722020243497, + "learning_rate": 9.882605501186747e-06, + "loss": 1.2629, + "step": 424 + }, + { + "epoch": 0.8161305808929429, + "grad_norm": 0.699083447265363, + "learning_rate": 9.881157499647944e-06, + "loss": 1.3218, + "step": 425 + }, + { + "epoch": 0.8180508881421027, + "grad_norm": 0.724984065762376, + "learning_rate": 9.87970072997295e-06, + "loss": 1.3034, + "step": 426 + }, + { + "epoch": 
0.8199711953912626, + "grad_norm": 0.7049961948332424, + "learning_rate": 9.878235194778594e-06, + "loss": 1.4015, + "step": 427 + }, + { + "epoch": 0.8218915026404224, + "grad_norm": 0.6961558656551843, + "learning_rate": 9.87676089669745e-06, + "loss": 1.3292, + "step": 428 + }, + { + "epoch": 0.8238118098895824, + "grad_norm": 0.6755568609982437, + "learning_rate": 9.875277838377835e-06, + "loss": 1.2485, + "step": 429 + }, + { + "epoch": 0.8257321171387422, + "grad_norm": 0.731506928442002, + "learning_rate": 9.8737860224838e-06, + "loss": 1.371, + "step": 430 + }, + { + "epoch": 0.8276524243879021, + "grad_norm": 0.8321982922227138, + "learning_rate": 9.872285451695128e-06, + "loss": 1.3981, + "step": 431 + }, + { + "epoch": 0.8295727316370619, + "grad_norm": 0.7315030508325402, + "learning_rate": 9.87077612870733e-06, + "loss": 1.3311, + "step": 432 + }, + { + "epoch": 0.8314930388862218, + "grad_norm": 0.798773303498576, + "learning_rate": 9.869258056231638e-06, + "loss": 1.3727, + "step": 433 + }, + { + "epoch": 0.8334133461353816, + "grad_norm": 0.651844540018506, + "learning_rate": 9.867731236995e-06, + "loss": 1.3471, + "step": 434 + }, + { + "epoch": 0.8353336533845416, + "grad_norm": 0.6771670988304741, + "learning_rate": 9.866195673740076e-06, + "loss": 1.3032, + "step": 435 + }, + { + "epoch": 0.8372539606337014, + "grad_norm": 0.8611651792236157, + "learning_rate": 9.864651369225236e-06, + "loss": 1.2559, + "step": 436 + }, + { + "epoch": 0.8391742678828612, + "grad_norm": 0.7436061953882284, + "learning_rate": 9.863098326224546e-06, + "loss": 1.3166, + "step": 437 + }, + { + "epoch": 0.8410945751320211, + "grad_norm": 0.7409544469403813, + "learning_rate": 9.86153654752778e-06, + "loss": 1.4249, + "step": 438 + }, + { + "epoch": 0.8430148823811809, + "grad_norm": 0.7184845856939001, + "learning_rate": 9.859966035940391e-06, + "loss": 1.3899, + "step": 439 + }, + { + "epoch": 0.8449351896303409, + "grad_norm": 0.7569549756216626, + "learning_rate": 9.858386794283527e-06, + "loss": 1.4622, + "step": 440 + }, + { + "epoch": 0.8468554968795007, + "grad_norm": 0.7230751877755235, + "learning_rate": 9.856798825394017e-06, + "loss": 1.3834, + "step": 441 + }, + { + "epoch": 0.8487758041286606, + "grad_norm": 0.7861385241652444, + "learning_rate": 9.855202132124367e-06, + "loss": 1.4005, + "step": 442 + }, + { + "epoch": 0.8506961113778204, + "grad_norm": 0.654961628331442, + "learning_rate": 9.853596717342751e-06, + "loss": 1.2536, + "step": 443 + }, + { + "epoch": 0.8526164186269803, + "grad_norm": 0.7338664793979522, + "learning_rate": 9.851982583933015e-06, + "loss": 1.4289, + "step": 444 + }, + { + "epoch": 0.8545367258761402, + "grad_norm": 0.647102767057043, + "learning_rate": 9.850359734794664e-06, + "loss": 1.3167, + "step": 445 + }, + { + "epoch": 0.8564570331253001, + "grad_norm": 0.7940838292459027, + "learning_rate": 9.84872817284286e-06, + "loss": 1.4604, + "step": 446 + }, + { + "epoch": 0.8583773403744599, + "grad_norm": 0.7447428727236874, + "learning_rate": 9.847087901008415e-06, + "loss": 1.392, + "step": 447 + }, + { + "epoch": 0.8602976476236198, + "grad_norm": 0.7643508165383381, + "learning_rate": 9.845438922237787e-06, + "loss": 1.3017, + "step": 448 + }, + { + "epoch": 0.8622179548727796, + "grad_norm": 0.7617962346454569, + "learning_rate": 9.843781239493076e-06, + "loss": 1.6087, + "step": 449 + }, + { + "epoch": 0.8641382621219396, + "grad_norm": 0.7385042271210234, + "learning_rate": 9.842114855752013e-06, + "loss": 1.287, + "step": 450 + }, + { + 
"epoch": 0.8660585693710994, + "grad_norm": 0.7904769194673538, + "learning_rate": 9.840439774007963e-06, + "loss": 1.3847, + "step": 451 + }, + { + "epoch": 0.8679788766202592, + "grad_norm": 0.7796917727237478, + "learning_rate": 9.838755997269917e-06, + "loss": 1.4052, + "step": 452 + }, + { + "epoch": 0.8698991838694191, + "grad_norm": 0.6685557701211076, + "learning_rate": 9.837063528562479e-06, + "loss": 1.2191, + "step": 453 + }, + { + "epoch": 0.8718194911185789, + "grad_norm": 0.682007015556512, + "learning_rate": 9.835362370925868e-06, + "loss": 1.4041, + "step": 454 + }, + { + "epoch": 0.8737397983677389, + "grad_norm": 0.6809420760471568, + "learning_rate": 9.833652527415918e-06, + "loss": 1.1179, + "step": 455 + }, + { + "epoch": 0.8756601056168987, + "grad_norm": 0.7496451496119366, + "learning_rate": 9.831934001104056e-06, + "loss": 1.1863, + "step": 456 + }, + { + "epoch": 0.8775804128660586, + "grad_norm": 0.7958820882337777, + "learning_rate": 9.830206795077313e-06, + "loss": 1.4097, + "step": 457 + }, + { + "epoch": 0.8795007201152184, + "grad_norm": 0.7760893359826682, + "learning_rate": 9.828470912438308e-06, + "loss": 1.4765, + "step": 458 + }, + { + "epoch": 0.8814210273643783, + "grad_norm": 0.7181251163054755, + "learning_rate": 9.826726356305248e-06, + "loss": 1.2336, + "step": 459 + }, + { + "epoch": 0.8833413346135381, + "grad_norm": 0.7130113363618711, + "learning_rate": 9.824973129811919e-06, + "loss": 1.3356, + "step": 460 + }, + { + "epoch": 0.8852616418626981, + "grad_norm": 0.6976783021258673, + "learning_rate": 9.823211236107684e-06, + "loss": 1.5375, + "step": 461 + }, + { + "epoch": 0.8871819491118579, + "grad_norm": 0.6494482314881563, + "learning_rate": 9.82144067835747e-06, + "loss": 1.4216, + "step": 462 + }, + { + "epoch": 0.8891022563610178, + "grad_norm": 0.6799368531582445, + "learning_rate": 9.819661459741774e-06, + "loss": 1.2607, + "step": 463 + }, + { + "epoch": 0.8910225636101776, + "grad_norm": 0.7200924252996468, + "learning_rate": 9.817873583456646e-06, + "loss": 1.3954, + "step": 464 + }, + { + "epoch": 0.8929428708593375, + "grad_norm": 0.723830426362561, + "learning_rate": 9.816077052713689e-06, + "loss": 1.2634, + "step": 465 + }, + { + "epoch": 0.8948631781084974, + "grad_norm": 0.6795402172591166, + "learning_rate": 9.814271870740054e-06, + "loss": 1.288, + "step": 466 + }, + { + "epoch": 0.8967834853576572, + "grad_norm": 0.7798610485610813, + "learning_rate": 9.812458040778433e-06, + "loss": 1.3816, + "step": 467 + }, + { + "epoch": 0.8987037926068171, + "grad_norm": 0.6598552167407624, + "learning_rate": 9.810635566087046e-06, + "loss": 1.3142, + "step": 468 + }, + { + "epoch": 0.9006240998559769, + "grad_norm": 0.8267440234447042, + "learning_rate": 9.808804449939649e-06, + "loss": 1.3952, + "step": 469 + }, + { + "epoch": 0.9025444071051368, + "grad_norm": 0.6831251782753334, + "learning_rate": 9.806964695625521e-06, + "loss": 1.3507, + "step": 470 + }, + { + "epoch": 0.9044647143542967, + "grad_norm": 0.6913550521559437, + "learning_rate": 9.80511630644945e-06, + "loss": 1.3858, + "step": 471 + }, + { + "epoch": 0.9063850216034566, + "grad_norm": 0.651357091385134, + "learning_rate": 9.803259285731744e-06, + "loss": 1.3119, + "step": 472 + }, + { + "epoch": 0.9083053288526164, + "grad_norm": 0.6876771805281661, + "learning_rate": 9.801393636808213e-06, + "loss": 1.405, + "step": 473 + }, + { + "epoch": 0.9102256361017763, + "grad_norm": 0.6704161705700665, + "learning_rate": 9.79951936303016e-06, + "loss": 1.1645, + "step": 
474 + }, + { + "epoch": 0.9121459433509361, + "grad_norm": 0.8403757549868232, + "learning_rate": 9.797636467764392e-06, + "loss": 1.3374, + "step": 475 + }, + { + "epoch": 0.914066250600096, + "grad_norm": 0.6976996239643926, + "learning_rate": 9.795744954393193e-06, + "loss": 1.2789, + "step": 476 + }, + { + "epoch": 0.9159865578492559, + "grad_norm": 0.7007105159698541, + "learning_rate": 9.793844826314338e-06, + "loss": 1.2513, + "step": 477 + }, + { + "epoch": 0.9179068650984158, + "grad_norm": 0.8046443692914484, + "learning_rate": 9.791936086941065e-06, + "loss": 1.4276, + "step": 478 + }, + { + "epoch": 0.9198271723475756, + "grad_norm": 0.6689604084398844, + "learning_rate": 9.790018739702091e-06, + "loss": 1.1329, + "step": 479 + }, + { + "epoch": 0.9217474795967355, + "grad_norm": 0.7418530237185074, + "learning_rate": 9.788092788041589e-06, + "loss": 1.2312, + "step": 480 + }, + { + "epoch": 0.9236677868458953, + "grad_norm": 0.7344158824384261, + "learning_rate": 9.78615823541919e-06, + "loss": 1.5327, + "step": 481 + }, + { + "epoch": 0.9255880940950552, + "grad_norm": 0.7029451657520306, + "learning_rate": 9.784215085309977e-06, + "loss": 1.3297, + "step": 482 + }, + { + "epoch": 0.9275084013442151, + "grad_norm": 0.8107964882673727, + "learning_rate": 9.782263341204477e-06, + "loss": 1.2561, + "step": 483 + }, + { + "epoch": 0.9294287085933749, + "grad_norm": 0.7114545290156662, + "learning_rate": 9.78030300660865e-06, + "loss": 1.3645, + "step": 484 + }, + { + "epoch": 0.9313490158425348, + "grad_norm": 0.7402003366232237, + "learning_rate": 9.77833408504389e-06, + "loss": 1.3642, + "step": 485 + }, + { + "epoch": 0.9332693230916946, + "grad_norm": 0.7109408774118504, + "learning_rate": 9.77635658004702e-06, + "loss": 1.3221, + "step": 486 + }, + { + "epoch": 0.9351896303408546, + "grad_norm": 0.7433358040489166, + "learning_rate": 9.774370495170276e-06, + "loss": 1.4449, + "step": 487 + }, + { + "epoch": 0.9371099375900144, + "grad_norm": 0.6785055655567391, + "learning_rate": 9.772375833981306e-06, + "loss": 1.3555, + "step": 488 + }, + { + "epoch": 0.9390302448391743, + "grad_norm": 0.76870851128488, + "learning_rate": 9.770372600063172e-06, + "loss": 1.284, + "step": 489 + }, + { + "epoch": 0.9409505520883341, + "grad_norm": 0.7344357851176699, + "learning_rate": 9.768360797014325e-06, + "loss": 1.2853, + "step": 490 + }, + { + "epoch": 0.942870859337494, + "grad_norm": 0.8470942465410728, + "learning_rate": 9.766340428448614e-06, + "loss": 1.3829, + "step": 491 + }, + { + "epoch": 0.9447911665866539, + "grad_norm": 0.7211389944931649, + "learning_rate": 9.764311497995272e-06, + "loss": 1.2677, + "step": 492 + }, + { + "epoch": 0.9467114738358138, + "grad_norm": 0.7084359929065828, + "learning_rate": 9.762274009298918e-06, + "loss": 1.2434, + "step": 493 + }, + { + "epoch": 0.9486317810849736, + "grad_norm": 0.7689934246592068, + "learning_rate": 9.760227966019537e-06, + "loss": 1.4095, + "step": 494 + }, + { + "epoch": 0.9505520883341335, + "grad_norm": 0.7773642092371199, + "learning_rate": 9.758173371832485e-06, + "loss": 1.3244, + "step": 495 + }, + { + "epoch": 0.9524723955832933, + "grad_norm": 0.6978701658115153, + "learning_rate": 9.756110230428476e-06, + "loss": 1.2787, + "step": 496 + }, + { + "epoch": 0.9543927028324531, + "grad_norm": 0.6910966359494893, + "learning_rate": 9.75403854551358e-06, + "loss": 1.3348, + "step": 497 + }, + { + "epoch": 0.9563130100816131, + "grad_norm": 0.732636720833676, + "learning_rate": 9.751958320809213e-06, + "loss": 1.2403, 
+ "step": 498 + }, + { + "epoch": 0.9582333173307729, + "grad_norm": 0.7804889809056719, + "learning_rate": 9.749869560052128e-06, + "loss": 1.1905, + "step": 499 + }, + { + "epoch": 0.9601536245799328, + "grad_norm": 0.7286628977028098, + "learning_rate": 9.747772266994418e-06, + "loss": 1.3252, + "step": 500 + }, + { + "epoch": 0.9620739318290926, + "grad_norm": 0.730360159535326, + "learning_rate": 9.745666445403496e-06, + "loss": 1.4712, + "step": 501 + }, + { + "epoch": 0.9639942390782525, + "grad_norm": 0.6650898577066633, + "learning_rate": 9.7435520990621e-06, + "loss": 1.2945, + "step": 502 + }, + { + "epoch": 0.9659145463274124, + "grad_norm": 0.6813965501305161, + "learning_rate": 9.741429231768278e-06, + "loss": 1.3214, + "step": 503 + }, + { + "epoch": 0.9678348535765723, + "grad_norm": 0.8240692929170976, + "learning_rate": 9.739297847335387e-06, + "loss": 1.4367, + "step": 504 + }, + { + "epoch": 0.9697551608257321, + "grad_norm": 0.7664659172540482, + "learning_rate": 9.73715794959208e-06, + "loss": 1.2429, + "step": 505 + }, + { + "epoch": 0.971675468074892, + "grad_norm": 0.663273604561841, + "learning_rate": 9.735009542382308e-06, + "loss": 1.2678, + "step": 506 + }, + { + "epoch": 0.9735957753240518, + "grad_norm": 0.7286317666999398, + "learning_rate": 9.732852629565302e-06, + "loss": 1.351, + "step": 507 + }, + { + "epoch": 0.9755160825732118, + "grad_norm": 0.7222364516570275, + "learning_rate": 9.730687215015576e-06, + "loss": 1.3875, + "step": 508 + }, + { + "epoch": 0.9774363898223716, + "grad_norm": 0.792789498600007, + "learning_rate": 9.728513302622911e-06, + "loss": 1.4158, + "step": 509 + }, + { + "epoch": 0.9793566970715314, + "grad_norm": 0.6898048543889689, + "learning_rate": 9.72633089629236e-06, + "loss": 1.3018, + "step": 510 + }, + { + "epoch": 0.9812770043206913, + "grad_norm": 0.7080789810250435, + "learning_rate": 9.72413999994423e-06, + "loss": 1.2951, + "step": 511 + }, + { + "epoch": 0.9831973115698511, + "grad_norm": 0.6471793883594157, + "learning_rate": 9.721940617514076e-06, + "loss": 1.1768, + "step": 512 + }, + { + "epoch": 0.985117618819011, + "grad_norm": 0.6848848680839071, + "learning_rate": 9.719732752952702e-06, + "loss": 1.262, + "step": 513 + }, + { + "epoch": 0.9870379260681709, + "grad_norm": 0.7903965882462866, + "learning_rate": 9.717516410226144e-06, + "loss": 1.4717, + "step": 514 + }, + { + "epoch": 0.9889582333173308, + "grad_norm": 0.7404310033314039, + "learning_rate": 9.715291593315672e-06, + "loss": 1.3879, + "step": 515 + }, + { + "epoch": 0.9908785405664906, + "grad_norm": 0.735452133325044, + "learning_rate": 9.713058306217776e-06, + "loss": 1.3079, + "step": 516 + }, + { + "epoch": 0.9927988478156505, + "grad_norm": 0.8130352152653534, + "learning_rate": 9.710816552944157e-06, + "loss": 1.434, + "step": 517 + }, + { + "epoch": 0.9947191550648103, + "grad_norm": 0.7502971580652452, + "learning_rate": 9.708566337521736e-06, + "loss": 1.3013, + "step": 518 + }, + { + "epoch": 0.9966394623139703, + "grad_norm": 0.6582057806093718, + "learning_rate": 9.70630766399262e-06, + "loss": 1.2994, + "step": 519 + }, + { + "epoch": 0.9985597695631301, + "grad_norm": 0.7398007707770013, + "learning_rate": 9.70404053641412e-06, + "loss": 1.3135, + "step": 520 + }, + { + "epoch": 1.0, + "grad_norm": 0.7398007707770013, + "learning_rate": 9.701764958858729e-06, + "loss": 1.1265, + "step": 521 + }, + { + "epoch": 1.0019203072491598, + "grad_norm": 0.914948928730533, + "learning_rate": 9.69948093541412e-06, + "loss": 1.1908, + "step": 
522 + }, + { + "epoch": 1.0038406144983196, + "grad_norm": 0.7588233450162211, + "learning_rate": 9.697188470183137e-06, + "loss": 1.2765, + "step": 523 + }, + { + "epoch": 1.0057609217474797, + "grad_norm": 0.6899163901372742, + "learning_rate": 9.694887567283786e-06, + "loss": 1.262, + "step": 524 + }, + { + "epoch": 1.0076812289966395, + "grad_norm": 0.7729265974237742, + "learning_rate": 9.692578230849237e-06, + "loss": 1.3718, + "step": 525 + }, + { + "epoch": 1.0096015362457993, + "grad_norm": 0.76124735458907, + "learning_rate": 9.690260465027802e-06, + "loss": 1.3091, + "step": 526 + }, + { + "epoch": 1.011521843494959, + "grad_norm": 0.8261719411142489, + "learning_rate": 9.687934273982934e-06, + "loss": 1.2777, + "step": 527 + }, + { + "epoch": 1.0134421507441191, + "grad_norm": 0.6830001963579748, + "learning_rate": 9.68559966189323e-06, + "loss": 1.2425, + "step": 528 + }, + { + "epoch": 1.015362457993279, + "grad_norm": 0.7028896372705892, + "learning_rate": 9.683256632952402e-06, + "loss": 1.3801, + "step": 529 + }, + { + "epoch": 1.0172827652424388, + "grad_norm": 0.8453182259497111, + "learning_rate": 9.680905191369293e-06, + "loss": 1.335, + "step": 530 + }, + { + "epoch": 1.0192030724915986, + "grad_norm": 0.7691091692417015, + "learning_rate": 9.678545341367846e-06, + "loss": 1.2368, + "step": 531 + }, + { + "epoch": 1.0211233797407586, + "grad_norm": 0.6781623931537163, + "learning_rate": 9.67617708718712e-06, + "loss": 1.2796, + "step": 532 + }, + { + "epoch": 1.0230436869899184, + "grad_norm": 0.7367258690005868, + "learning_rate": 9.67380043308126e-06, + "loss": 1.2936, + "step": 533 + }, + { + "epoch": 1.0249639942390782, + "grad_norm": 0.691600698798815, + "learning_rate": 9.671415383319507e-06, + "loss": 1.5315, + "step": 534 + }, + { + "epoch": 1.026884301488238, + "grad_norm": 0.7722544599357118, + "learning_rate": 9.66902194218618e-06, + "loss": 1.3265, + "step": 535 + }, + { + "epoch": 1.0288046087373979, + "grad_norm": 0.7907265691669353, + "learning_rate": 9.666620113980673e-06, + "loss": 1.5249, + "step": 536 + }, + { + "epoch": 1.030724915986558, + "grad_norm": 0.8296730708074218, + "learning_rate": 9.664209903017447e-06, + "loss": 1.36, + "step": 537 + }, + { + "epoch": 1.0326452232357177, + "grad_norm": 0.6882651452466539, + "learning_rate": 9.661791313626019e-06, + "loss": 1.1086, + "step": 538 + }, + { + "epoch": 1.0345655304848775, + "grad_norm": 0.7450250373498115, + "learning_rate": 9.659364350150955e-06, + "loss": 1.1367, + "step": 539 + }, + { + "epoch": 1.0364858377340374, + "grad_norm": 0.7120678514024298, + "learning_rate": 9.656929016951869e-06, + "loss": 1.1975, + "step": 540 + }, + { + "epoch": 1.0384061449831974, + "grad_norm": 0.771727566325184, + "learning_rate": 9.654485318403404e-06, + "loss": 1.1875, + "step": 541 + }, + { + "epoch": 1.0403264522323572, + "grad_norm": 0.6961127537941473, + "learning_rate": 9.652033258895233e-06, + "loss": 1.2588, + "step": 542 + }, + { + "epoch": 1.042246759481517, + "grad_norm": 0.7479292355314183, + "learning_rate": 9.649572842832048e-06, + "loss": 1.4162, + "step": 543 + }, + { + "epoch": 1.0441670667306768, + "grad_norm": 0.8317819644739988, + "learning_rate": 9.64710407463355e-06, + "loss": 1.2725, + "step": 544 + }, + { + "epoch": 1.0460873739798369, + "grad_norm": 0.7604838992261496, + "learning_rate": 9.644626958734447e-06, + "loss": 1.1814, + "step": 545 + }, + { + "epoch": 1.0480076812289967, + "grad_norm": 0.7495453422521224, + "learning_rate": 9.642141499584436e-06, + "loss": 1.2463, + 
"step": 546 + }, + { + "epoch": 1.0499279884781565, + "grad_norm": 0.7152271852476584, + "learning_rate": 9.639647701648206e-06, + "loss": 1.3762, + "step": 547 + }, + { + "epoch": 1.0518482957273163, + "grad_norm": 0.6919477745781931, + "learning_rate": 9.637145569405426e-06, + "loss": 1.2951, + "step": 548 + }, + { + "epoch": 1.0537686029764763, + "grad_norm": 0.5919052540740619, + "learning_rate": 9.634635107350731e-06, + "loss": 1.1825, + "step": 549 + }, + { + "epoch": 1.0556889102256362, + "grad_norm": 0.6527783671699664, + "learning_rate": 9.632116319993726e-06, + "loss": 1.4155, + "step": 550 + }, + { + "epoch": 1.057609217474796, + "grad_norm": 0.8062261860338987, + "learning_rate": 9.629589211858963e-06, + "loss": 1.2512, + "step": 551 + }, + { + "epoch": 1.0595295247239558, + "grad_norm": 0.8008581715050265, + "learning_rate": 9.627053787485944e-06, + "loss": 1.3264, + "step": 552 + }, + { + "epoch": 1.0614498319731156, + "grad_norm": 0.6973877309767292, + "learning_rate": 9.624510051429116e-06, + "loss": 1.4041, + "step": 553 + }, + { + "epoch": 1.0633701392222756, + "grad_norm": 0.8158929404093554, + "learning_rate": 9.621958008257848e-06, + "loss": 1.3745, + "step": 554 + }, + { + "epoch": 1.0652904464714354, + "grad_norm": 0.6809292868930115, + "learning_rate": 9.619397662556434e-06, + "loss": 1.1379, + "step": 555 + }, + { + "epoch": 1.0672107537205953, + "grad_norm": 0.7614958232951388, + "learning_rate": 9.616829018924083e-06, + "loss": 1.4064, + "step": 556 + }, + { + "epoch": 1.069131060969755, + "grad_norm": 0.7494950717531818, + "learning_rate": 9.614252081974908e-06, + "loss": 1.4378, + "step": 557 + }, + { + "epoch": 1.0710513682189151, + "grad_norm": 0.6009709835822509, + "learning_rate": 9.61166685633792e-06, + "loss": 1.306, + "step": 558 + }, + { + "epoch": 1.072971675468075, + "grad_norm": 0.7353167635763869, + "learning_rate": 9.609073346657021e-06, + "loss": 1.3607, + "step": 559 + }, + { + "epoch": 1.0748919827172347, + "grad_norm": 0.9417037901973762, + "learning_rate": 9.606471557590992e-06, + "loss": 1.3322, + "step": 560 + }, + { + "epoch": 1.0768122899663946, + "grad_norm": 0.6799455391858547, + "learning_rate": 9.603861493813486e-06, + "loss": 1.2997, + "step": 561 + }, + { + "epoch": 1.0787325972155546, + "grad_norm": 0.7062230147048892, + "learning_rate": 9.601243160013023e-06, + "loss": 1.2617, + "step": 562 + }, + { + "epoch": 1.0806529044647144, + "grad_norm": 0.7674724288187535, + "learning_rate": 9.598616560892977e-06, + "loss": 1.3903, + "step": 563 + }, + { + "epoch": 1.0825732117138742, + "grad_norm": 0.683909615499401, + "learning_rate": 9.595981701171564e-06, + "loss": 1.0946, + "step": 564 + }, + { + "epoch": 1.084493518963034, + "grad_norm": 0.7398808578213143, + "learning_rate": 9.593338585581848e-06, + "loss": 1.1715, + "step": 565 + }, + { + "epoch": 1.086413826212194, + "grad_norm": 0.7047962556591744, + "learning_rate": 9.59068721887172e-06, + "loss": 1.2756, + "step": 566 + }, + { + "epoch": 1.0883341334613539, + "grad_norm": 0.6315097473402969, + "learning_rate": 9.588027605803886e-06, + "loss": 1.2555, + "step": 567 + }, + { + "epoch": 1.0902544407105137, + "grad_norm": 0.7234467460996638, + "learning_rate": 9.585359751155874e-06, + "loss": 1.3359, + "step": 568 + }, + { + "epoch": 1.0921747479596735, + "grad_norm": 0.6076408944277064, + "learning_rate": 9.582683659720013e-06, + "loss": 1.2367, + "step": 569 + }, + { + "epoch": 1.0940950552088333, + "grad_norm": 0.6174139463787864, + "learning_rate": 9.579999336303427e-06, + 
"loss": 1.3522, + "step": 570 + }, + { + "epoch": 1.0960153624579934, + "grad_norm": 0.7480007037028495, + "learning_rate": 9.577306785728028e-06, + "loss": 1.2621, + "step": 571 + }, + { + "epoch": 1.0979356697071532, + "grad_norm": 0.7643115891903187, + "learning_rate": 9.574606012830509e-06, + "loss": 1.2505, + "step": 572 + }, + { + "epoch": 1.099855976956313, + "grad_norm": 0.6563832097104907, + "learning_rate": 9.571897022462329e-06, + "loss": 1.2947, + "step": 573 + }, + { + "epoch": 1.1017762842054728, + "grad_norm": 0.7178248398921613, + "learning_rate": 9.569179819489712e-06, + "loss": 1.3066, + "step": 574 + }, + { + "epoch": 1.1036965914546328, + "grad_norm": 0.7083396283957663, + "learning_rate": 9.566454408793628e-06, + "loss": 1.2919, + "step": 575 + }, + { + "epoch": 1.1056168987037926, + "grad_norm": 0.6599770996680595, + "learning_rate": 9.563720795269801e-06, + "loss": 1.3526, + "step": 576 + }, + { + "epoch": 1.1075372059529525, + "grad_norm": 0.708272003033926, + "learning_rate": 9.560978983828682e-06, + "loss": 1.243, + "step": 577 + }, + { + "epoch": 1.1094575132021123, + "grad_norm": 0.7309704918989066, + "learning_rate": 9.558228979395448e-06, + "loss": 1.164, + "step": 578 + }, + { + "epoch": 1.1113778204512723, + "grad_norm": 0.6947735355279264, + "learning_rate": 9.55547078691e-06, + "loss": 1.2518, + "step": 579 + }, + { + "epoch": 1.1132981277004321, + "grad_norm": 0.6205776923415179, + "learning_rate": 9.552704411326938e-06, + "loss": 1.264, + "step": 580 + }, + { + "epoch": 1.115218434949592, + "grad_norm": 0.6848751463529882, + "learning_rate": 9.549929857615571e-06, + "loss": 1.3857, + "step": 581 + }, + { + "epoch": 1.1171387421987518, + "grad_norm": 0.6873124432568535, + "learning_rate": 9.547147130759894e-06, + "loss": 1.2951, + "step": 582 + }, + { + "epoch": 1.1190590494479116, + "grad_norm": 0.6699975273598144, + "learning_rate": 9.54435623575858e-06, + "loss": 1.2551, + "step": 583 + }, + { + "epoch": 1.1209793566970716, + "grad_norm": 0.6779630282881215, + "learning_rate": 9.541557177624978e-06, + "loss": 1.3882, + "step": 584 + }, + { + "epoch": 1.1228996639462314, + "grad_norm": 0.6922291738327386, + "learning_rate": 9.538749961387106e-06, + "loss": 1.3379, + "step": 585 + }, + { + "epoch": 1.1248199711953912, + "grad_norm": 0.6743878536243224, + "learning_rate": 9.535934592087627e-06, + "loss": 1.243, + "step": 586 + }, + { + "epoch": 1.126740278444551, + "grad_norm": 0.7100328653945479, + "learning_rate": 9.533111074783857e-06, + "loss": 1.3178, + "step": 587 + }, + { + "epoch": 1.128660585693711, + "grad_norm": 0.604519259408001, + "learning_rate": 9.530279414547743e-06, + "loss": 1.3051, + "step": 588 + }, + { + "epoch": 1.130580892942871, + "grad_norm": 0.6526520665140377, + "learning_rate": 9.527439616465864e-06, + "loss": 1.4662, + "step": 589 + }, + { + "epoch": 1.1325012001920307, + "grad_norm": 0.6906072243725673, + "learning_rate": 9.524591685639414e-06, + "loss": 1.3682, + "step": 590 + }, + { + "epoch": 1.1344215074411905, + "grad_norm": 0.677843087192056, + "learning_rate": 9.521735627184197e-06, + "loss": 1.2735, + "step": 591 + }, + { + "epoch": 1.1363418146903506, + "grad_norm": 0.6367565663261762, + "learning_rate": 9.518871446230616e-06, + "loss": 1.3467, + "step": 592 + }, + { + "epoch": 1.1382621219395104, + "grad_norm": 0.8296312537408668, + "learning_rate": 9.515999147923666e-06, + "loss": 1.3932, + "step": 593 + }, + { + "epoch": 1.1401824291886702, + "grad_norm": 0.6513452900246463, + "learning_rate": 
9.513118737422926e-06, + "loss": 1.2055, + "step": 594 + }, + { + "epoch": 1.14210273643783, + "grad_norm": 0.6216514451209604, + "learning_rate": 9.51023021990254e-06, + "loss": 1.2417, + "step": 595 + }, + { + "epoch": 1.1440230436869898, + "grad_norm": 0.7531692439045267, + "learning_rate": 9.50733360055122e-06, + "loss": 1.2386, + "step": 596 + }, + { + "epoch": 1.1459433509361499, + "grad_norm": 0.683513142501149, + "learning_rate": 9.50442888457223e-06, + "loss": 1.2326, + "step": 597 + }, + { + "epoch": 1.1478636581853097, + "grad_norm": 0.6594339187726981, + "learning_rate": 9.501516077183381e-06, + "loss": 1.2825, + "step": 598 + }, + { + "epoch": 1.1497839654344695, + "grad_norm": 0.7622581730719366, + "learning_rate": 9.498595183617014e-06, + "loss": 1.2091, + "step": 599 + }, + { + "epoch": 1.1517042726836293, + "grad_norm": 0.7667615881022617, + "learning_rate": 9.495666209119998e-06, + "loss": 1.4263, + "step": 600 + }, + { + "epoch": 1.1536245799327893, + "grad_norm": 0.6614445792795562, + "learning_rate": 9.492729158953717e-06, + "loss": 1.3555, + "step": 601 + }, + { + "epoch": 1.1555448871819491, + "grad_norm": 0.6775001666189906, + "learning_rate": 9.489784038394065e-06, + "loss": 1.2491, + "step": 602 + }, + { + "epoch": 1.157465194431109, + "grad_norm": 0.7044086225702691, + "learning_rate": 9.486830852731428e-06, + "loss": 1.2622, + "step": 603 + }, + { + "epoch": 1.1593855016802688, + "grad_norm": 0.7464552469199642, + "learning_rate": 9.48386960727068e-06, + "loss": 1.3274, + "step": 604 + }, + { + "epoch": 1.1613058089294288, + "grad_norm": 0.832385639864367, + "learning_rate": 9.48090030733118e-06, + "loss": 1.2453, + "step": 605 + }, + { + "epoch": 1.1632261161785886, + "grad_norm": 0.6252343436809756, + "learning_rate": 9.477922958246747e-06, + "loss": 1.1933, + "step": 606 + }, + { + "epoch": 1.1651464234277484, + "grad_norm": 0.7355703889077223, + "learning_rate": 9.47493756536566e-06, + "loss": 1.306, + "step": 607 + }, + { + "epoch": 1.1670667306769082, + "grad_norm": 0.7273603661247848, + "learning_rate": 9.471944134050652e-06, + "loss": 1.3707, + "step": 608 + }, + { + "epoch": 1.168987037926068, + "grad_norm": 0.7562042075996765, + "learning_rate": 9.468942669678893e-06, + "loss": 1.3688, + "step": 609 + }, + { + "epoch": 1.170907345175228, + "grad_norm": 0.802848759502404, + "learning_rate": 9.465933177641981e-06, + "loss": 1.2193, + "step": 610 + }, + { + "epoch": 1.172827652424388, + "grad_norm": 0.7609509108594381, + "learning_rate": 9.46291566334594e-06, + "loss": 1.3736, + "step": 611 + }, + { + "epoch": 1.1747479596735477, + "grad_norm": 0.8308878738963801, + "learning_rate": 9.459890132211198e-06, + "loss": 1.3762, + "step": 612 + }, + { + "epoch": 1.1766682669227075, + "grad_norm": 0.6532589559736274, + "learning_rate": 9.456856589672587e-06, + "loss": 1.3125, + "step": 613 + }, + { + "epoch": 1.1785885741718676, + "grad_norm": 0.7153467048815468, + "learning_rate": 9.453815041179329e-06, + "loss": 1.4605, + "step": 614 + }, + { + "epoch": 1.1805088814210274, + "grad_norm": 0.8574229555342932, + "learning_rate": 9.450765492195027e-06, + "loss": 1.2833, + "step": 615 + }, + { + "epoch": 1.1824291886701872, + "grad_norm": 0.7052192727678307, + "learning_rate": 9.44770794819766e-06, + "loss": 1.426, + "step": 616 + }, + { + "epoch": 1.184349495919347, + "grad_norm": 0.8135394020685827, + "learning_rate": 9.444642414679563e-06, + "loss": 1.3768, + "step": 617 + }, + { + "epoch": 1.186269803168507, + "grad_norm": 0.6225214032963996, + "learning_rate": 
9.441568897147423e-06, + "loss": 1.3118, + "step": 618 + }, + { + "epoch": 1.1881901104176669, + "grad_norm": 0.72856086645004, + "learning_rate": 9.438487401122277e-06, + "loss": 1.2549, + "step": 619 + }, + { + "epoch": 1.1901104176668267, + "grad_norm": 0.7806208477609391, + "learning_rate": 9.435397932139478e-06, + "loss": 1.2322, + "step": 620 + }, + { + "epoch": 1.1920307249159865, + "grad_norm": 0.6519805710347557, + "learning_rate": 9.432300495748719e-06, + "loss": 1.3982, + "step": 621 + }, + { + "epoch": 1.1939510321651463, + "grad_norm": 0.7771841737007339, + "learning_rate": 9.429195097513993e-06, + "loss": 1.3656, + "step": 622 + }, + { + "epoch": 1.1958713394143063, + "grad_norm": 0.7437011183267965, + "learning_rate": 9.426081743013599e-06, + "loss": 1.222, + "step": 623 + }, + { + "epoch": 1.1977916466634662, + "grad_norm": 0.5875165599100549, + "learning_rate": 9.422960437840128e-06, + "loss": 1.1133, + "step": 624 + }, + { + "epoch": 1.199711953912626, + "grad_norm": 0.7624140053435047, + "learning_rate": 9.419831187600453e-06, + "loss": 1.3283, + "step": 625 + }, + { + "epoch": 1.201632261161786, + "grad_norm": 0.7653770887737222, + "learning_rate": 9.416693997915717e-06, + "loss": 1.1884, + "step": 626 + }, + { + "epoch": 1.2035525684109458, + "grad_norm": 0.6152692591706712, + "learning_rate": 9.41354887442133e-06, + "loss": 1.2304, + "step": 627 + }, + { + "epoch": 1.2054728756601056, + "grad_norm": 0.8185718374516386, + "learning_rate": 9.410395822766946e-06, + "loss": 1.4138, + "step": 628 + }, + { + "epoch": 1.2073931829092654, + "grad_norm": 0.7082210656201997, + "learning_rate": 9.407234848616467e-06, + "loss": 1.2941, + "step": 629 + }, + { + "epoch": 1.2093134901584253, + "grad_norm": 0.6232135092488823, + "learning_rate": 9.404065957648023e-06, + "loss": 1.2503, + "step": 630 + }, + { + "epoch": 1.2112337974075853, + "grad_norm": 0.654358423334644, + "learning_rate": 9.400889155553969e-06, + "loss": 1.2953, + "step": 631 + }, + { + "epoch": 1.2131541046567451, + "grad_norm": 0.8666882012026393, + "learning_rate": 9.397704448040865e-06, + "loss": 1.2951, + "step": 632 + }, + { + "epoch": 1.215074411905905, + "grad_norm": 0.799703690690942, + "learning_rate": 9.394511840829474e-06, + "loss": 1.1688, + "step": 633 + }, + { + "epoch": 1.2169947191550647, + "grad_norm": 0.5744242469127047, + "learning_rate": 9.391311339654755e-06, + "loss": 1.361, + "step": 634 + }, + { + "epoch": 1.2189150264042246, + "grad_norm": 0.6674313237003483, + "learning_rate": 9.388102950265836e-06, + "loss": 1.1743, + "step": 635 + }, + { + "epoch": 1.2208353336533846, + "grad_norm": 0.7039450941038129, + "learning_rate": 9.384886678426027e-06, + "loss": 1.3431, + "step": 636 + }, + { + "epoch": 1.2227556409025444, + "grad_norm": 0.7752002897514472, + "learning_rate": 9.381662529912787e-06, + "loss": 1.3558, + "step": 637 + }, + { + "epoch": 1.2246759481517042, + "grad_norm": 0.7005182546751737, + "learning_rate": 9.378430510517732e-06, + "loss": 1.4033, + "step": 638 + }, + { + "epoch": 1.2265962554008643, + "grad_norm": 0.7688364382975947, + "learning_rate": 9.37519062604661e-06, + "loss": 1.3613, + "step": 639 + }, + { + "epoch": 1.228516562650024, + "grad_norm": 0.7239757068116605, + "learning_rate": 9.371942882319306e-06, + "loss": 1.3262, + "step": 640 + }, + { + "epoch": 1.2304368698991839, + "grad_norm": 0.6873674276530499, + "learning_rate": 9.36868728516981e-06, + "loss": 1.1821, + "step": 641 + }, + { + "epoch": 1.2323571771483437, + "grad_norm": 0.6742489136796876, + 
"learning_rate": 9.36542384044623e-06, + "loss": 1.1827, + "step": 642 + }, + { + "epoch": 1.2342774843975035, + "grad_norm": 0.76768397108436, + "learning_rate": 9.36215255401077e-06, + "loss": 1.2645, + "step": 643 + }, + { + "epoch": 1.2361977916466635, + "grad_norm": 0.7270486835964114, + "learning_rate": 9.358873431739712e-06, + "loss": 1.2607, + "step": 644 + }, + { + "epoch": 1.2381180988958234, + "grad_norm": 0.8519540490456053, + "learning_rate": 9.355586479523424e-06, + "loss": 1.3387, + "step": 645 + }, + { + "epoch": 1.2400384061449832, + "grad_norm": 0.6797527920764758, + "learning_rate": 9.352291703266332e-06, + "loss": 1.3279, + "step": 646 + }, + { + "epoch": 1.241958713394143, + "grad_norm": 0.8368086299915656, + "learning_rate": 9.348989108886919e-06, + "loss": 1.3217, + "step": 647 + }, + { + "epoch": 1.243879020643303, + "grad_norm": 0.8539520255198279, + "learning_rate": 9.345678702317711e-06, + "loss": 1.3196, + "step": 648 + }, + { + "epoch": 1.2457993278924628, + "grad_norm": 0.6860987908944224, + "learning_rate": 9.342360489505271e-06, + "loss": 1.4192, + "step": 649 + }, + { + "epoch": 1.2477196351416227, + "grad_norm": 0.8205531903946733, + "learning_rate": 9.339034476410177e-06, + "loss": 1.2021, + "step": 650 + }, + { + "epoch": 1.2496399423907825, + "grad_norm": 0.9081433696653918, + "learning_rate": 9.335700669007028e-06, + "loss": 1.2448, + "step": 651 + }, + { + "epoch": 1.2515602496399425, + "grad_norm": 0.7071653317141182, + "learning_rate": 9.332359073284417e-06, + "loss": 1.2858, + "step": 652 + }, + { + "epoch": 1.2534805568891023, + "grad_norm": 0.7411138132312242, + "learning_rate": 9.329009695244929e-06, + "loss": 1.2792, + "step": 653 + }, + { + "epoch": 1.2554008641382621, + "grad_norm": 0.8302739758280621, + "learning_rate": 9.32565254090513e-06, + "loss": 1.2798, + "step": 654 + }, + { + "epoch": 1.257321171387422, + "grad_norm": 0.7748066850549846, + "learning_rate": 9.322287616295553e-06, + "loss": 1.372, + "step": 655 + }, + { + "epoch": 1.2592414786365818, + "grad_norm": 0.7053895497064686, + "learning_rate": 9.318914927460694e-06, + "loss": 1.3919, + "step": 656 + }, + { + "epoch": 1.2611617858857418, + "grad_norm": 0.7970812172979578, + "learning_rate": 9.315534480458986e-06, + "loss": 1.3171, + "step": 657 + }, + { + "epoch": 1.2630820931349016, + "grad_norm": 0.6944180261511493, + "learning_rate": 9.312146281362811e-06, + "loss": 1.0936, + "step": 658 + }, + { + "epoch": 1.2650024003840614, + "grad_norm": 0.6378844061470125, + "learning_rate": 9.308750336258463e-06, + "loss": 1.3324, + "step": 659 + }, + { + "epoch": 1.2669227076332212, + "grad_norm": 0.8429774002872888, + "learning_rate": 9.30534665124616e-06, + "loss": 1.2359, + "step": 660 + }, + { + "epoch": 1.268843014882381, + "grad_norm": 0.8204171557741506, + "learning_rate": 9.30193523244002e-06, + "loss": 1.3692, + "step": 661 + }, + { + "epoch": 1.270763322131541, + "grad_norm": 0.8025145383476096, + "learning_rate": 9.298516085968052e-06, + "loss": 1.1787, + "step": 662 + }, + { + "epoch": 1.272683629380701, + "grad_norm": 0.7124288281643562, + "learning_rate": 9.295089217972152e-06, + "loss": 1.2906, + "step": 663 + }, + { + "epoch": 1.2746039366298607, + "grad_norm": 0.7525796363802068, + "learning_rate": 9.291654634608079e-06, + "loss": 1.297, + "step": 664 + }, + { + "epoch": 1.2765242438790207, + "grad_norm": 1.067363938892021, + "learning_rate": 9.288212342045453e-06, + "loss": 1.3022, + "step": 665 + }, + { + "epoch": 1.2784445511281806, + "grad_norm": 
0.6619389553270122, + "learning_rate": 9.284762346467749e-06, + "loss": 1.2192, + "step": 666 + }, + { + "epoch": 1.2803648583773404, + "grad_norm": 0.7985420894282017, + "learning_rate": 9.281304654072267e-06, + "loss": 1.2289, + "step": 667 + }, + { + "epoch": 1.2822851656265002, + "grad_norm": 0.6623460682542177, + "learning_rate": 9.277839271070146e-06, + "loss": 1.1139, + "step": 668 + }, + { + "epoch": 1.28420547287566, + "grad_norm": 0.702752052754151, + "learning_rate": 9.274366203686332e-06, + "loss": 1.1201, + "step": 669 + }, + { + "epoch": 1.28612578012482, + "grad_norm": 0.6289400458286583, + "learning_rate": 9.270885458159576e-06, + "loss": 1.2891, + "step": 670 + }, + { + "epoch": 1.2880460873739799, + "grad_norm": 0.8427578486969821, + "learning_rate": 9.267397040742419e-06, + "loss": 1.3096, + "step": 671 + }, + { + "epoch": 1.2899663946231397, + "grad_norm": 0.6682673731700085, + "learning_rate": 9.263900957701191e-06, + "loss": 1.3436, + "step": 672 + }, + { + "epoch": 1.2918867018722997, + "grad_norm": 0.5915568132233988, + "learning_rate": 9.260397215315982e-06, + "loss": 1.177, + "step": 673 + }, + { + "epoch": 1.2938070091214593, + "grad_norm": 0.845875785609105, + "learning_rate": 9.25688581988065e-06, + "loss": 1.3084, + "step": 674 + }, + { + "epoch": 1.2957273163706193, + "grad_norm": 0.7518298927765613, + "learning_rate": 9.253366777702793e-06, + "loss": 1.3525, + "step": 675 + }, + { + "epoch": 1.2976476236197791, + "grad_norm": 0.6317348605399149, + "learning_rate": 9.249840095103748e-06, + "loss": 1.4346, + "step": 676 + }, + { + "epoch": 1.299567930868939, + "grad_norm": 0.7065670849481425, + "learning_rate": 9.246305778418578e-06, + "loss": 1.2933, + "step": 677 + }, + { + "epoch": 1.301488238118099, + "grad_norm": 0.6836131464449633, + "learning_rate": 9.242763833996058e-06, + "loss": 1.2998, + "step": 678 + }, + { + "epoch": 1.3034085453672588, + "grad_norm": 0.6170451881755116, + "learning_rate": 9.239214268198662e-06, + "loss": 1.3884, + "step": 679 + }, + { + "epoch": 1.3053288526164186, + "grad_norm": 0.8701684854560547, + "learning_rate": 9.235657087402561e-06, + "loss": 1.2995, + "step": 680 + }, + { + "epoch": 1.3072491598655784, + "grad_norm": 0.695844689357289, + "learning_rate": 9.232092297997598e-06, + "loss": 1.2732, + "step": 681 + }, + { + "epoch": 1.3091694671147383, + "grad_norm": 0.7103405815755808, + "learning_rate": 9.228519906387287e-06, + "loss": 1.3625, + "step": 682 + }, + { + "epoch": 1.3110897743638983, + "grad_norm": 0.7065126258973163, + "learning_rate": 9.2249399189888e-06, + "loss": 1.3358, + "step": 683 + }, + { + "epoch": 1.313010081613058, + "grad_norm": 0.703504561635419, + "learning_rate": 9.22135234223295e-06, + "loss": 1.3198, + "step": 684 + }, + { + "epoch": 1.314930388862218, + "grad_norm": 0.6758713111088065, + "learning_rate": 9.217757182564185e-06, + "loss": 1.2968, + "step": 685 + }, + { + "epoch": 1.316850696111378, + "grad_norm": 0.6586940502760603, + "learning_rate": 9.214154446440571e-06, + "loss": 1.3567, + "step": 686 + }, + { + "epoch": 1.3187710033605378, + "grad_norm": 0.6916983456158645, + "learning_rate": 9.210544140333787e-06, + "loss": 1.2495, + "step": 687 + }, + { + "epoch": 1.3206913106096976, + "grad_norm": 0.6687033750412827, + "learning_rate": 9.206926270729112e-06, + "loss": 1.3326, + "step": 688 + }, + { + "epoch": 1.3226116178588574, + "grad_norm": 0.6514229288911735, + "learning_rate": 9.203300844125407e-06, + "loss": 1.3184, + "step": 689 + }, + { + "epoch": 1.3245319251080172, + 
"grad_norm": 0.7367890594020262, + "learning_rate": 9.199667867035111e-06, + "loss": 1.1066, + "step": 690 + }, + { + "epoch": 1.3264522323571772, + "grad_norm": 0.6791993316929129, + "learning_rate": 9.196027345984223e-06, + "loss": 1.2166, + "step": 691 + }, + { + "epoch": 1.328372539606337, + "grad_norm": 0.6186559224575634, + "learning_rate": 9.192379287512294e-06, + "loss": 1.3028, + "step": 692 + }, + { + "epoch": 1.3302928468554969, + "grad_norm": 0.7034503705343318, + "learning_rate": 9.188723698172421e-06, + "loss": 1.5347, + "step": 693 + }, + { + "epoch": 1.3322131541046567, + "grad_norm": 0.6229250085704403, + "learning_rate": 9.185060584531218e-06, + "loss": 1.4723, + "step": 694 + }, + { + "epoch": 1.3341334613538165, + "grad_norm": 0.7534919743366831, + "learning_rate": 9.181389953168825e-06, + "loss": 1.1994, + "step": 695 + }, + { + "epoch": 1.3360537686029765, + "grad_norm": 0.7107991436376052, + "learning_rate": 9.17771181067888e-06, + "loss": 1.2875, + "step": 696 + }, + { + "epoch": 1.3379740758521363, + "grad_norm": 0.6090769881805889, + "learning_rate": 9.174026163668516e-06, + "loss": 1.1811, + "step": 697 + }, + { + "epoch": 1.3398943831012962, + "grad_norm": 0.5976408741436412, + "learning_rate": 9.170333018758345e-06, + "loss": 1.2105, + "step": 698 + }, + { + "epoch": 1.3418146903504562, + "grad_norm": 0.7155288651414388, + "learning_rate": 9.166632382582452e-06, + "loss": 1.2097, + "step": 699 + }, + { + "epoch": 1.343734997599616, + "grad_norm": 0.6340468084444298, + "learning_rate": 9.162924261788372e-06, + "loss": 1.2756, + "step": 700 + }, + { + "epoch": 1.3456553048487758, + "grad_norm": 0.62226057586666, + "learning_rate": 9.159208663037088e-06, + "loss": 1.3445, + "step": 701 + }, + { + "epoch": 1.3475756120979356, + "grad_norm": 0.75460068826842, + "learning_rate": 9.15548559300302e-06, + "loss": 1.4983, + "step": 702 + }, + { + "epoch": 1.3494959193470955, + "grad_norm": 0.6610830578070283, + "learning_rate": 9.151755058374e-06, + "loss": 1.4167, + "step": 703 + }, + { + "epoch": 1.3514162265962555, + "grad_norm": 1.0148103056939377, + "learning_rate": 9.148017065851276e-06, + "loss": 1.152, + "step": 704 + }, + { + "epoch": 1.3533365338454153, + "grad_norm": 0.7157508626391539, + "learning_rate": 9.144271622149492e-06, + "loss": 1.1954, + "step": 705 + }, + { + "epoch": 1.3552568410945751, + "grad_norm": 0.6035040897309075, + "learning_rate": 9.140518733996672e-06, + "loss": 1.2952, + "step": 706 + }, + { + "epoch": 1.357177148343735, + "grad_norm": 0.7909130402584547, + "learning_rate": 9.136758408134219e-06, + "loss": 1.2898, + "step": 707 + }, + { + "epoch": 1.3590974555928947, + "grad_norm": 0.7293448393510418, + "learning_rate": 9.13299065131689e-06, + "loss": 1.2905, + "step": 708 + }, + { + "epoch": 1.3610177628420548, + "grad_norm": 0.630031286073638, + "learning_rate": 9.129215470312798e-06, + "loss": 1.3821, + "step": 709 + }, + { + "epoch": 1.3629380700912146, + "grad_norm": 0.6260025482348084, + "learning_rate": 9.125432871903383e-06, + "loss": 1.2427, + "step": 710 + }, + { + "epoch": 1.3648583773403744, + "grad_norm": 0.6759927808317432, + "learning_rate": 9.121642862883418e-06, + "loss": 1.3635, + "step": 711 + }, + { + "epoch": 1.3667786845895344, + "grad_norm": 0.6875922592092826, + "learning_rate": 9.117845450060983e-06, + "loss": 1.3153, + "step": 712 + }, + { + "epoch": 1.3686989918386943, + "grad_norm": 0.6975079984965866, + "learning_rate": 9.114040640257457e-06, + "loss": 1.3284, + "step": 713 + }, + { + "epoch": 
1.370619299087854, + "grad_norm": 0.6542547355789712, + "learning_rate": 9.11022844030751e-06, + "loss": 1.268, + "step": 714 + }, + { + "epoch": 1.3725396063370139, + "grad_norm": 0.7131124321593435, + "learning_rate": 9.106408857059083e-06, + "loss": 1.3038, + "step": 715 + }, + { + "epoch": 1.3744599135861737, + "grad_norm": 0.6621678872162269, + "learning_rate": 9.102581897373385e-06, + "loss": 1.3846, + "step": 716 + }, + { + "epoch": 1.3763802208353337, + "grad_norm": 0.6528497631975559, + "learning_rate": 9.098747568124867e-06, + "loss": 1.391, + "step": 717 + }, + { + "epoch": 1.3783005280844935, + "grad_norm": 0.7950540458654306, + "learning_rate": 9.09490587620123e-06, + "loss": 1.2342, + "step": 718 + }, + { + "epoch": 1.3802208353336534, + "grad_norm": 0.6661090729260767, + "learning_rate": 9.09105682850339e-06, + "loss": 1.1368, + "step": 719 + }, + { + "epoch": 1.3821411425828132, + "grad_norm": 0.6896287298603911, + "learning_rate": 9.08720043194548e-06, + "loss": 1.2482, + "step": 720 + }, + { + "epoch": 1.384061449831973, + "grad_norm": 0.6469933404131487, + "learning_rate": 9.083336693454835e-06, + "loss": 1.2464, + "step": 721 + }, + { + "epoch": 1.385981757081133, + "grad_norm": 0.6564169816340895, + "learning_rate": 9.079465619971979e-06, + "loss": 1.3078, + "step": 722 + }, + { + "epoch": 1.3879020643302928, + "grad_norm": 0.6191833636580572, + "learning_rate": 9.075587218450611e-06, + "loss": 1.1747, + "step": 723 + }, + { + "epoch": 1.3898223715794527, + "grad_norm": 0.6183363215998144, + "learning_rate": 9.071701495857593e-06, + "loss": 1.3765, + "step": 724 + }, + { + "epoch": 1.3917426788286127, + "grad_norm": 0.7166010853381539, + "learning_rate": 9.067808459172935e-06, + "loss": 1.1739, + "step": 725 + }, + { + "epoch": 1.3936629860777725, + "grad_norm": 0.5814057362039444, + "learning_rate": 9.063908115389794e-06, + "loss": 1.2789, + "step": 726 + }, + { + "epoch": 1.3955832933269323, + "grad_norm": 0.7121122914175096, + "learning_rate": 9.060000471514447e-06, + "loss": 1.4757, + "step": 727 + }, + { + "epoch": 1.3975036005760921, + "grad_norm": 0.705583390032838, + "learning_rate": 9.056085534566283e-06, + "loss": 1.1249, + "step": 728 + }, + { + "epoch": 1.399423907825252, + "grad_norm": 0.6107217100732334, + "learning_rate": 9.052163311577795e-06, + "loss": 1.2457, + "step": 729 + }, + { + "epoch": 1.401344215074412, + "grad_norm": 0.6570510132698529, + "learning_rate": 9.048233809594561e-06, + "loss": 1.3098, + "step": 730 + }, + { + "epoch": 1.4032645223235718, + "grad_norm": 0.6242960521987202, + "learning_rate": 9.04429703567524e-06, + "loss": 1.3533, + "step": 731 + }, + { + "epoch": 1.4051848295727316, + "grad_norm": 0.699339430879501, + "learning_rate": 9.040352996891549e-06, + "loss": 1.5123, + "step": 732 + }, + { + "epoch": 1.4071051368218914, + "grad_norm": 0.8344946761901371, + "learning_rate": 9.036401700328255e-06, + "loss": 1.3611, + "step": 733 + }, + { + "epoch": 1.4090254440710512, + "grad_norm": 0.5789608462391438, + "learning_rate": 9.032443153083163e-06, + "loss": 1.2094, + "step": 734 + }, + { + "epoch": 1.4109457513202113, + "grad_norm": 0.7135604127502478, + "learning_rate": 9.028477362267103e-06, + "loss": 1.3675, + "step": 735 + }, + { + "epoch": 1.412866058569371, + "grad_norm": 0.657978923325545, + "learning_rate": 9.024504335003918e-06, + "loss": 1.1803, + "step": 736 + }, + { + "epoch": 1.414786365818531, + "grad_norm": 0.6166786992689153, + "learning_rate": 9.02052407843045e-06, + "loss": 1.3075, + "step": 737 + }, + { + 
"epoch": 1.416706673067691, + "grad_norm": 0.6363515202021924, + "learning_rate": 9.016536599696524e-06, + "loss": 1.4076, + "step": 738 + }, + { + "epoch": 1.4186269803168507, + "grad_norm": 0.7118481237155734, + "learning_rate": 9.01254190596494e-06, + "loss": 1.3571, + "step": 739 + }, + { + "epoch": 1.4205472875660106, + "grad_norm": 0.6600459827422428, + "learning_rate": 9.00854000441146e-06, + "loss": 1.1144, + "step": 740 + }, + { + "epoch": 1.4224675948151704, + "grad_norm": 0.6239591484123759, + "learning_rate": 9.004530902224793e-06, + "loss": 1.3528, + "step": 741 + }, + { + "epoch": 1.4243879020643302, + "grad_norm": 0.7100163193828547, + "learning_rate": 9.00051460660658e-06, + "loss": 1.31, + "step": 742 + }, + { + "epoch": 1.4263082093134902, + "grad_norm": 0.7325087503328771, + "learning_rate": 8.996491124771387e-06, + "loss": 1.306, + "step": 743 + }, + { + "epoch": 1.42822851656265, + "grad_norm": 0.730452846870962, + "learning_rate": 8.992460463946689e-06, + "loss": 1.2024, + "step": 744 + }, + { + "epoch": 1.4301488238118099, + "grad_norm": 0.7161540737322926, + "learning_rate": 8.988422631372854e-06, + "loss": 1.1938, + "step": 745 + }, + { + "epoch": 1.43206913106097, + "grad_norm": 0.6561434676104188, + "learning_rate": 8.98437763430313e-06, + "loss": 1.1944, + "step": 746 + }, + { + "epoch": 1.4339894383101295, + "grad_norm": 0.6297905970632676, + "learning_rate": 8.980325480003647e-06, + "loss": 1.3503, + "step": 747 + }, + { + "epoch": 1.4359097455592895, + "grad_norm": 0.7625277635565622, + "learning_rate": 8.976266175753376e-06, + "loss": 1.34, + "step": 748 + }, + { + "epoch": 1.4378300528084493, + "grad_norm": 0.6672733475659698, + "learning_rate": 8.972199728844144e-06, + "loss": 1.1897, + "step": 749 + }, + { + "epoch": 1.4397503600576091, + "grad_norm": 0.7199838468969945, + "learning_rate": 8.968126146580602e-06, + "loss": 1.3659, + "step": 750 + }, + { + "epoch": 1.4416706673067692, + "grad_norm": 0.6953976515544105, + "learning_rate": 8.96404543628022e-06, + "loss": 1.3271, + "step": 751 + }, + { + "epoch": 1.443590974555929, + "grad_norm": 0.6846915648125165, + "learning_rate": 8.959957605273274e-06, + "loss": 1.1573, + "step": 752 + }, + { + "epoch": 1.4455112818050888, + "grad_norm": 0.6841588862649848, + "learning_rate": 8.955862660902828e-06, + "loss": 1.3383, + "step": 753 + }, + { + "epoch": 1.4474315890542486, + "grad_norm": 0.6213000908457852, + "learning_rate": 8.951760610524725e-06, + "loss": 1.1511, + "step": 754 + }, + { + "epoch": 1.4493518963034084, + "grad_norm": 0.5816520616610011, + "learning_rate": 8.947651461507573e-06, + "loss": 1.2896, + "step": 755 + }, + { + "epoch": 1.4512722035525685, + "grad_norm": 0.6834825786713268, + "learning_rate": 8.943535221232731e-06, + "loss": 1.1905, + "step": 756 + }, + { + "epoch": 1.4531925108017283, + "grad_norm": 0.7390087469911581, + "learning_rate": 8.939411897094298e-06, + "loss": 1.3174, + "step": 757 + }, + { + "epoch": 1.455112818050888, + "grad_norm": 0.7196135019950182, + "learning_rate": 8.935281496499098e-06, + "loss": 1.2327, + "step": 758 + }, + { + "epoch": 1.4570331253000481, + "grad_norm": 0.5860396585624823, + "learning_rate": 8.931144026866662e-06, + "loss": 1.2348, + "step": 759 + }, + { + "epoch": 1.458953432549208, + "grad_norm": 0.7918382564660377, + "learning_rate": 8.926999495629225e-06, + "loss": 1.1791, + "step": 760 + }, + { + "epoch": 1.4608737397983678, + "grad_norm": 0.7055490148273642, + "learning_rate": 8.922847910231702e-06, + "loss": 1.1687, + "step": 761 + }, + { 
+ "epoch": 1.4627940470475276, + "grad_norm": 0.5524065552778338, + "learning_rate": 8.918689278131684e-06, + "loss": 1.3717, + "step": 762 + }, + { + "epoch": 1.4647143542966874, + "grad_norm": 0.7460708719531967, + "learning_rate": 8.914523606799416e-06, + "loss": 1.2305, + "step": 763 + }, + { + "epoch": 1.4666346615458474, + "grad_norm": 0.6951265014515928, + "learning_rate": 8.910350903717793e-06, + "loss": 1.3857, + "step": 764 + }, + { + "epoch": 1.4685549687950072, + "grad_norm": 0.6742950089132507, + "learning_rate": 8.906171176382336e-06, + "loss": 1.2732, + "step": 765 + }, + { + "epoch": 1.470475276044167, + "grad_norm": 0.7672591308011247, + "learning_rate": 8.901984432301185e-06, + "loss": 1.3741, + "step": 766 + }, + { + "epoch": 1.4723955832933269, + "grad_norm": 0.6895255447617727, + "learning_rate": 8.897790678995088e-06, + "loss": 1.1914, + "step": 767 + }, + { + "epoch": 1.4743158905424867, + "grad_norm": 0.5905849417666214, + "learning_rate": 8.893589923997379e-06, + "loss": 1.2328, + "step": 768 + }, + { + "epoch": 1.4762361977916467, + "grad_norm": 0.7661433188154492, + "learning_rate": 8.889382174853971e-06, + "loss": 1.4402, + "step": 769 + }, + { + "epoch": 1.4781565050408065, + "grad_norm": 0.7226773436729304, + "learning_rate": 8.885167439123343e-06, + "loss": 1.3498, + "step": 770 + }, + { + "epoch": 1.4800768122899663, + "grad_norm": 0.645854499389175, + "learning_rate": 8.880945724376522e-06, + "loss": 1.0738, + "step": 771 + }, + { + "epoch": 1.4819971195391264, + "grad_norm": 0.730924402691002, + "learning_rate": 8.876717038197072e-06, + "loss": 1.2628, + "step": 772 + }, + { + "epoch": 1.4839174267882862, + "grad_norm": 0.7007233981122745, + "learning_rate": 8.872481388181076e-06, + "loss": 1.1873, + "step": 773 + }, + { + "epoch": 1.485837734037446, + "grad_norm": 0.5889909988446032, + "learning_rate": 8.868238781937137e-06, + "loss": 1.0944, + "step": 774 + }, + { + "epoch": 1.4877580412866058, + "grad_norm": 0.7107956135191795, + "learning_rate": 8.863989227086343e-06, + "loss": 1.3349, + "step": 775 + }, + { + "epoch": 1.4896783485357656, + "grad_norm": 0.6175391436776866, + "learning_rate": 8.859732731262268e-06, + "loss": 1.2646, + "step": 776 + }, + { + "epoch": 1.4915986557849257, + "grad_norm": 0.65271111460711, + "learning_rate": 8.855469302110952e-06, + "loss": 1.2263, + "step": 777 + }, + { + "epoch": 1.4935189630340855, + "grad_norm": 0.6583187935159579, + "learning_rate": 8.851198947290895e-06, + "loss": 1.202, + "step": 778 + }, + { + "epoch": 1.4954392702832453, + "grad_norm": 0.5781288868057943, + "learning_rate": 8.846921674473033e-06, + "loss": 1.3714, + "step": 779 + }, + { + "epoch": 1.4973595775324051, + "grad_norm": 0.6783395545902824, + "learning_rate": 8.842637491340728e-06, + "loss": 1.3051, + "step": 780 + }, + { + "epoch": 1.499279884781565, + "grad_norm": 0.7190117162268456, + "learning_rate": 8.83834640558976e-06, + "loss": 1.1348, + "step": 781 + }, + { + "epoch": 1.501200192030725, + "grad_norm": 0.7371660449929316, + "learning_rate": 8.834048424928305e-06, + "loss": 1.2488, + "step": 782 + }, + { + "epoch": 1.5031204992798848, + "grad_norm": 0.6734459491576749, + "learning_rate": 8.829743557076926e-06, + "loss": 1.2673, + "step": 783 + }, + { + "epoch": 1.5050408065290446, + "grad_norm": 0.6032860650966215, + "learning_rate": 8.825431809768554e-06, + "loss": 1.2917, + "step": 784 + }, + { + "epoch": 1.5069611137782046, + "grad_norm": 0.7510759569269795, + "learning_rate": 8.821113190748482e-06, + "loss": 1.4579, + "step": 
785 + }, + { + "epoch": 1.5088814210273642, + "grad_norm": 0.6955345602964695, + "learning_rate": 8.816787707774347e-06, + "loss": 1.2052, + "step": 786 + }, + { + "epoch": 1.5108017282765243, + "grad_norm": 0.699131477732091, + "learning_rate": 8.812455368616112e-06, + "loss": 1.2347, + "step": 787 + }, + { + "epoch": 1.512722035525684, + "grad_norm": 0.6752955040693549, + "learning_rate": 8.808116181056059e-06, + "loss": 1.2042, + "step": 788 + }, + { + "epoch": 1.5146423427748439, + "grad_norm": 0.7495344136959898, + "learning_rate": 8.803770152888771e-06, + "loss": 1.2847, + "step": 789 + }, + { + "epoch": 1.516562650024004, + "grad_norm": 0.6746945912508688, + "learning_rate": 8.799417291921117e-06, + "loss": 1.3633, + "step": 790 + }, + { + "epoch": 1.5184829572731637, + "grad_norm": 0.7701161584892516, + "learning_rate": 8.795057605972247e-06, + "loss": 1.3794, + "step": 791 + }, + { + "epoch": 1.5204032645223235, + "grad_norm": 0.7268566540300971, + "learning_rate": 8.790691102873558e-06, + "loss": 1.1022, + "step": 792 + }, + { + "epoch": 1.5223235717714836, + "grad_norm": 0.7419238096069151, + "learning_rate": 8.786317790468708e-06, + "loss": 1.3445, + "step": 793 + }, + { + "epoch": 1.5242438790206432, + "grad_norm": 0.5804158480082398, + "learning_rate": 8.781937676613577e-06, + "loss": 1.5153, + "step": 794 + }, + { + "epoch": 1.5261641862698032, + "grad_norm": 0.816197319588985, + "learning_rate": 8.777550769176263e-06, + "loss": 1.2333, + "step": 795 + }, + { + "epoch": 1.528084493518963, + "grad_norm": 0.7546662492454415, + "learning_rate": 8.77315707603707e-06, + "loss": 1.2277, + "step": 796 + }, + { + "epoch": 1.5300048007681228, + "grad_norm": 0.580844842754435, + "learning_rate": 8.76875660508849e-06, + "loss": 1.403, + "step": 797 + }, + { + "epoch": 1.5319251080172829, + "grad_norm": 0.6741408076571399, + "learning_rate": 8.764349364235197e-06, + "loss": 1.3271, + "step": 798 + }, + { + "epoch": 1.5338454152664425, + "grad_norm": 0.779691874848022, + "learning_rate": 8.759935361394011e-06, + "loss": 1.3694, + "step": 799 + }, + { + "epoch": 1.5357657225156025, + "grad_norm": 0.7069960035792014, + "learning_rate": 8.755514604493912e-06, + "loss": 1.1364, + "step": 800 + }, + { + "epoch": 1.5376860297647623, + "grad_norm": 0.6332924636304982, + "learning_rate": 8.751087101476008e-06, + "loss": 1.4002, + "step": 801 + }, + { + "epoch": 1.5396063370139221, + "grad_norm": 0.7044744362416293, + "learning_rate": 8.746652860293523e-06, + "loss": 1.1917, + "step": 802 + }, + { + "epoch": 1.5415266442630822, + "grad_norm": 0.7812830705790423, + "learning_rate": 8.742211888911788e-06, + "loss": 1.4769, + "step": 803 + }, + { + "epoch": 1.543446951512242, + "grad_norm": 0.675685814114717, + "learning_rate": 8.737764195308226e-06, + "loss": 1.2728, + "step": 804 + }, + { + "epoch": 1.5453672587614018, + "grad_norm": 0.6820193005764167, + "learning_rate": 8.733309787472328e-06, + "loss": 1.3128, + "step": 805 + }, + { + "epoch": 1.5472875660105618, + "grad_norm": 0.7153026774691486, + "learning_rate": 8.72884867340565e-06, + "loss": 1.1845, + "step": 806 + }, + { + "epoch": 1.5492078732597214, + "grad_norm": 0.6525337231248045, + "learning_rate": 8.724380861121795e-06, + "loss": 1.2251, + "step": 807 + }, + { + "epoch": 1.5511281805088815, + "grad_norm": 0.6540180109670667, + "learning_rate": 8.7199063586464e-06, + "loss": 1.2874, + "step": 808 + }, + { + "epoch": 1.5530484877580413, + "grad_norm": 0.7635326405494673, + "learning_rate": 8.715425174017113e-06, + "loss": 1.358, + 
"step": 809 + }, + { + "epoch": 1.554968795007201, + "grad_norm": 0.6242507146581721, + "learning_rate": 8.710937315283594e-06, + "loss": 1.2419, + "step": 810 + }, + { + "epoch": 1.5568891022563611, + "grad_norm": 0.744752759091194, + "learning_rate": 8.706442790507486e-06, + "loss": 1.3055, + "step": 811 + }, + { + "epoch": 1.558809409505521, + "grad_norm": 0.6654168047572514, + "learning_rate": 8.701941607762407e-06, + "loss": 1.4469, + "step": 812 + }, + { + "epoch": 1.5607297167546808, + "grad_norm": 0.7586952812790637, + "learning_rate": 8.697433775133934e-06, + "loss": 1.2091, + "step": 813 + }, + { + "epoch": 1.5626500240038406, + "grad_norm": 0.7505624695472355, + "learning_rate": 8.692919300719596e-06, + "loss": 1.1346, + "step": 814 + }, + { + "epoch": 1.5645703312530004, + "grad_norm": 0.6150274934828531, + "learning_rate": 8.688398192628844e-06, + "loss": 1.2218, + "step": 815 + }, + { + "epoch": 1.5664906385021604, + "grad_norm": 0.6830054941296687, + "learning_rate": 8.68387045898305e-06, + "loss": 1.1967, + "step": 816 + }, + { + "epoch": 1.5684109457513202, + "grad_norm": 0.6697744853587582, + "learning_rate": 8.679336107915487e-06, + "loss": 1.2602, + "step": 817 + }, + { + "epoch": 1.57033125300048, + "grad_norm": 0.6949578343964008, + "learning_rate": 8.674795147571318e-06, + "loss": 1.2928, + "step": 818 + }, + { + "epoch": 1.57225156024964, + "grad_norm": 0.633917787386206, + "learning_rate": 8.670247586107567e-06, + "loss": 1.2756, + "step": 819 + }, + { + "epoch": 1.5741718674987997, + "grad_norm": 0.7247388852385745, + "learning_rate": 8.665693431693132e-06, + "loss": 1.2376, + "step": 820 + }, + { + "epoch": 1.5760921747479597, + "grad_norm": 0.6370568157248708, + "learning_rate": 8.661132692508742e-06, + "loss": 1.2305, + "step": 821 + }, + { + "epoch": 1.5780124819971195, + "grad_norm": 0.5773275861749614, + "learning_rate": 8.656565376746959e-06, + "loss": 1.3996, + "step": 822 + }, + { + "epoch": 1.5799327892462793, + "grad_norm": 0.7055843854327438, + "learning_rate": 8.65199149261216e-06, + "loss": 1.2102, + "step": 823 + }, + { + "epoch": 1.5818530964954394, + "grad_norm": 0.7568845107850198, + "learning_rate": 8.647411048320515e-06, + "loss": 1.14, + "step": 824 + }, + { + "epoch": 1.5837734037445992, + "grad_norm": 0.6353739121479085, + "learning_rate": 8.642824052099986e-06, + "loss": 1.383, + "step": 825 + }, + { + "epoch": 1.585693710993759, + "grad_norm": 0.7245216800702469, + "learning_rate": 8.638230512190298e-06, + "loss": 1.3269, + "step": 826 + }, + { + "epoch": 1.5876140182429188, + "grad_norm": 0.6456003754892913, + "learning_rate": 8.633630436842933e-06, + "loss": 1.391, + "step": 827 + }, + { + "epoch": 1.5895343254920786, + "grad_norm": 0.764262777332275, + "learning_rate": 8.629023834321113e-06, + "loss": 1.3651, + "step": 828 + }, + { + "epoch": 1.5914546327412387, + "grad_norm": 0.6827790863280228, + "learning_rate": 8.624410712899787e-06, + "loss": 1.2995, + "step": 829 + }, + { + "epoch": 1.5933749399903985, + "grad_norm": 0.6862806810660659, + "learning_rate": 8.619791080865609e-06, + "loss": 1.1752, + "step": 830 + }, + { + "epoch": 1.5952952472395583, + "grad_norm": 0.6926687316972496, + "learning_rate": 8.615164946516935e-06, + "loss": 1.2555, + "step": 831 + }, + { + "epoch": 1.5972155544887183, + "grad_norm": 0.6251419290492334, + "learning_rate": 8.61053231816379e-06, + "loss": 1.163, + "step": 832 + }, + { + "epoch": 1.599135861737878, + "grad_norm": 0.692086428022269, + "learning_rate": 8.605893204127877e-06, + "loss": 1.324, + 
"step": 833 + }, + { + "epoch": 1.601056168987038, + "grad_norm": 0.7778455904448239, + "learning_rate": 8.601247612742545e-06, + "loss": 1.2905, + "step": 834 + }, + { + "epoch": 1.6029764762361978, + "grad_norm": 0.6196302763793925, + "learning_rate": 8.596595552352773e-06, + "loss": 1.274, + "step": 835 + }, + { + "epoch": 1.6048967834853576, + "grad_norm": 0.6237845300699906, + "learning_rate": 8.591937031315167e-06, + "loss": 1.2982, + "step": 836 + }, + { + "epoch": 1.6068170907345176, + "grad_norm": 0.613978703840158, + "learning_rate": 8.587272057997937e-06, + "loss": 1.2335, + "step": 837 + }, + { + "epoch": 1.6087373979836774, + "grad_norm": 0.6686343059058948, + "learning_rate": 8.58260064078088e-06, + "loss": 1.3702, + "step": 838 + }, + { + "epoch": 1.6106577052328372, + "grad_norm": 0.759769684426153, + "learning_rate": 8.577922788055375e-06, + "loss": 1.2766, + "step": 839 + }, + { + "epoch": 1.6125780124819973, + "grad_norm": 0.6245956368253657, + "learning_rate": 8.573238508224351e-06, + "loss": 1.2805, + "step": 840 + }, + { + "epoch": 1.6144983197311569, + "grad_norm": 0.7290629551761605, + "learning_rate": 8.568547809702294e-06, + "loss": 1.1846, + "step": 841 + }, + { + "epoch": 1.616418626980317, + "grad_norm": 0.6779816447540971, + "learning_rate": 8.563850700915211e-06, + "loss": 1.3086, + "step": 842 + }, + { + "epoch": 1.6183389342294767, + "grad_norm": 0.6318966650515423, + "learning_rate": 8.55914719030063e-06, + "loss": 1.131, + "step": 843 + }, + { + "epoch": 1.6202592414786365, + "grad_norm": 0.6427129904533472, + "learning_rate": 8.554437286307573e-06, + "loss": 1.3493, + "step": 844 + }, + { + "epoch": 1.6221795487277966, + "grad_norm": 0.7080851195930218, + "learning_rate": 8.54972099739655e-06, + "loss": 1.2393, + "step": 845 + }, + { + "epoch": 1.6240998559769562, + "grad_norm": 0.6160771656855515, + "learning_rate": 8.544998332039543e-06, + "loss": 1.3681, + "step": 846 + }, + { + "epoch": 1.6260201632261162, + "grad_norm": 0.8454044975860124, + "learning_rate": 8.540269298719981e-06, + "loss": 1.2243, + "step": 847 + }, + { + "epoch": 1.627940470475276, + "grad_norm": 0.7375528021953657, + "learning_rate": 8.535533905932739e-06, + "loss": 1.325, + "step": 848 + }, + { + "epoch": 1.6298607777244358, + "grad_norm": 0.6216999198337684, + "learning_rate": 8.530792162184112e-06, + "loss": 1.3877, + "step": 849 + }, + { + "epoch": 1.6317810849735959, + "grad_norm": 0.6818891969657709, + "learning_rate": 8.526044075991801e-06, + "loss": 1.1877, + "step": 850 + }, + { + "epoch": 1.6337013922227557, + "grad_norm": 0.7427712661902629, + "learning_rate": 8.521289655884908e-06, + "loss": 1.1278, + "step": 851 + }, + { + "epoch": 1.6356216994719155, + "grad_norm": 0.5952544225101838, + "learning_rate": 8.516528910403906e-06, + "loss": 1.4064, + "step": 852 + }, + { + "epoch": 1.6375420067210755, + "grad_norm": 0.6848120275660302, + "learning_rate": 8.51176184810063e-06, + "loss": 1.3963, + "step": 853 + }, + { + "epoch": 1.6394623139702351, + "grad_norm": 0.7063980789331809, + "learning_rate": 8.506988477538267e-06, + "loss": 1.1821, + "step": 854 + }, + { + "epoch": 1.6413826212193952, + "grad_norm": 0.6446077020839321, + "learning_rate": 8.502208807291332e-06, + "loss": 1.3422, + "step": 855 + }, + { + "epoch": 1.643302928468555, + "grad_norm": 0.6388455910147647, + "learning_rate": 8.497422845945658e-06, + "loss": 1.2511, + "step": 856 + }, + { + "epoch": 1.6452232357177148, + "grad_norm": 0.7399303493707643, + "learning_rate": 8.492630602098377e-06, + "loss": 
1.2217, + "step": 857 + }, + { + "epoch": 1.6471435429668748, + "grad_norm": 0.7045086476609864, + "learning_rate": 8.487832084357908e-06, + "loss": 1.3431, + "step": 858 + }, + { + "epoch": 1.6490638502160344, + "grad_norm": 0.7039565820761484, + "learning_rate": 8.483027301343942e-06, + "loss": 1.3048, + "step": 859 + }, + { + "epoch": 1.6509841574651944, + "grad_norm": 0.6338362458948208, + "learning_rate": 8.478216261687417e-06, + "loss": 1.2741, + "step": 860 + }, + { + "epoch": 1.6529044647143543, + "grad_norm": 0.7079854924528219, + "learning_rate": 8.473398974030519e-06, + "loss": 1.317, + "step": 861 + }, + { + "epoch": 1.654824771963514, + "grad_norm": 0.7544349465014712, + "learning_rate": 8.468575447026653e-06, + "loss": 1.2428, + "step": 862 + }, + { + "epoch": 1.656745079212674, + "grad_norm": 0.7579460936381943, + "learning_rate": 8.463745689340428e-06, + "loss": 1.327, + "step": 863 + }, + { + "epoch": 1.658665386461834, + "grad_norm": 0.6500673020764594, + "learning_rate": 8.458909709647653e-06, + "loss": 1.3508, + "step": 864 + }, + { + "epoch": 1.6605856937109937, + "grad_norm": 0.5897561762214972, + "learning_rate": 8.45406751663531e-06, + "loss": 1.5225, + "step": 865 + }, + { + "epoch": 1.6625060009601538, + "grad_norm": 0.7959327705311353, + "learning_rate": 8.449219119001543e-06, + "loss": 1.3729, + "step": 866 + }, + { + "epoch": 1.6644263082093134, + "grad_norm": 0.7331153774645073, + "learning_rate": 8.444364525455642e-06, + "loss": 1.2854, + "step": 867 + }, + { + "epoch": 1.6663466154584734, + "grad_norm": 0.5928127084001641, + "learning_rate": 8.43950374471802e-06, + "loss": 1.3122, + "step": 868 + }, + { + "epoch": 1.6682669227076332, + "grad_norm": 0.6755707034773414, + "learning_rate": 8.434636785520218e-06, + "loss": 1.2862, + "step": 869 + }, + { + "epoch": 1.670187229956793, + "grad_norm": 0.6339666901160925, + "learning_rate": 8.42976365660486e-06, + "loss": 1.3128, + "step": 870 + }, + { + "epoch": 1.672107537205953, + "grad_norm": 0.678020909918651, + "learning_rate": 8.424884366725665e-06, + "loss": 1.2833, + "step": 871 + }, + { + "epoch": 1.6740278444551127, + "grad_norm": 0.643027666772922, + "learning_rate": 8.419998924647412e-06, + "loss": 1.2969, + "step": 872 + }, + { + "epoch": 1.6759481517042727, + "grad_norm": 0.6961417219596171, + "learning_rate": 8.415107339145933e-06, + "loss": 1.1697, + "step": 873 + }, + { + "epoch": 1.6778684589534325, + "grad_norm": 0.5982028382247672, + "learning_rate": 8.4102096190081e-06, + "loss": 1.3004, + "step": 874 + }, + { + "epoch": 1.6797887662025923, + "grad_norm": 0.7682354786227164, + "learning_rate": 8.4053057730318e-06, + "loss": 1.3747, + "step": 875 + }, + { + "epoch": 1.6817090734517524, + "grad_norm": 0.6716740624063096, + "learning_rate": 8.400395810025922e-06, + "loss": 1.2719, + "step": 876 + }, + { + "epoch": 1.6836293807009122, + "grad_norm": 0.6440524699748738, + "learning_rate": 8.395479738810347e-06, + "loss": 1.3379, + "step": 877 + }, + { + "epoch": 1.685549687950072, + "grad_norm": 0.6920927110189645, + "learning_rate": 8.39055756821593e-06, + "loss": 1.2993, + "step": 878 + }, + { + "epoch": 1.687469995199232, + "grad_norm": 0.6987825363370698, + "learning_rate": 8.385629307084477e-06, + "loss": 1.2867, + "step": 879 + }, + { + "epoch": 1.6893903024483916, + "grad_norm": 0.7765961352229686, + "learning_rate": 8.38069496426874e-06, + "loss": 1.2875, + "step": 880 + }, + { + "epoch": 1.6913106096975516, + "grad_norm": 0.5996561276864317, + "learning_rate": 8.375754548632392e-06, + 
"loss": 1.4783, + "step": 881 + }, + { + "epoch": 1.6932309169467115, + "grad_norm": 0.5850694559829233, + "learning_rate": 8.370808069050016e-06, + "loss": 1.1561, + "step": 882 + }, + { + "epoch": 1.6951512241958713, + "grad_norm": 0.7456789269050706, + "learning_rate": 8.365855534407089e-06, + "loss": 1.2655, + "step": 883 + }, + { + "epoch": 1.6970715314450313, + "grad_norm": 0.6083339409975786, + "learning_rate": 8.360896953599962e-06, + "loss": 1.1948, + "step": 884 + }, + { + "epoch": 1.6989918386941911, + "grad_norm": 0.9171462954821785, + "learning_rate": 8.355932335535849e-06, + "loss": 1.2125, + "step": 885 + }, + { + "epoch": 1.700912145943351, + "grad_norm": 0.6521085415420599, + "learning_rate": 8.350961689132808e-06, + "loss": 1.4241, + "step": 886 + }, + { + "epoch": 1.7028324531925108, + "grad_norm": 0.7910874600723566, + "learning_rate": 8.345985023319727e-06, + "loss": 1.1417, + "step": 887 + }, + { + "epoch": 1.7047527604416706, + "grad_norm": 0.6027394559994305, + "learning_rate": 8.341002347036304e-06, + "loss": 1.2801, + "step": 888 + }, + { + "epoch": 1.7066730676908306, + "grad_norm": 0.6966016112269007, + "learning_rate": 8.336013669233039e-06, + "loss": 1.271, + "step": 889 + }, + { + "epoch": 1.7085933749399904, + "grad_norm": 0.6919363199470899, + "learning_rate": 8.331018998871207e-06, + "loss": 1.2261, + "step": 890 + }, + { + "epoch": 1.7105136821891502, + "grad_norm": 0.6706001120414775, + "learning_rate": 8.32601834492285e-06, + "loss": 1.176, + "step": 891 + }, + { + "epoch": 1.7124339894383103, + "grad_norm": 0.6297425533497251, + "learning_rate": 8.32101171637076e-06, + "loss": 1.0596, + "step": 892 + }, + { + "epoch": 1.7143542966874699, + "grad_norm": 0.7344756503897651, + "learning_rate": 8.315999122208459e-06, + "loss": 1.2274, + "step": 893 + }, + { + "epoch": 1.71627460393663, + "grad_norm": 0.5546082526816237, + "learning_rate": 8.310980571440184e-06, + "loss": 1.2394, + "step": 894 + }, + { + "epoch": 1.7181949111857897, + "grad_norm": 0.7256143287268254, + "learning_rate": 8.305956073080879e-06, + "loss": 1.2171, + "step": 895 + }, + { + "epoch": 1.7201152184349495, + "grad_norm": 0.6381053874642392, + "learning_rate": 8.300925636156159e-06, + "loss": 1.3499, + "step": 896 + }, + { + "epoch": 1.7220355256841096, + "grad_norm": 0.705514992532687, + "learning_rate": 8.295889269702322e-06, + "loss": 1.3576, + "step": 897 + }, + { + "epoch": 1.7239558329332694, + "grad_norm": 0.6870236039044634, + "learning_rate": 8.290846982766305e-06, + "loss": 1.3549, + "step": 898 + }, + { + "epoch": 1.7258761401824292, + "grad_norm": 0.6054537907728821, + "learning_rate": 8.285798784405685e-06, + "loss": 1.4147, + "step": 899 + }, + { + "epoch": 1.727796447431589, + "grad_norm": 0.6774991278621717, + "learning_rate": 8.28074468368866e-06, + "loss": 1.2747, + "step": 900 + }, + { + "epoch": 1.7297167546807488, + "grad_norm": 0.5809303428546913, + "learning_rate": 8.275684689694025e-06, + "loss": 1.2324, + "step": 901 + }, + { + "epoch": 1.7316370619299088, + "grad_norm": 0.6418772583066495, + "learning_rate": 8.270618811511166e-06, + "loss": 1.3204, + "step": 902 + }, + { + "epoch": 1.7335573691790687, + "grad_norm": 0.6916575011270142, + "learning_rate": 8.265547058240038e-06, + "loss": 1.2656, + "step": 903 + }, + { + "epoch": 1.7354776764282285, + "grad_norm": 0.6254607447916599, + "learning_rate": 8.260469438991147e-06, + "loss": 1.5506, + "step": 904 + }, + { + "epoch": 1.7373979836773885, + "grad_norm": 0.7560305369852807, + "learning_rate": 
8.255385962885541e-06, + "loss": 1.2419, + "step": 905 + }, + { + "epoch": 1.739318290926548, + "grad_norm": 0.6276013332706113, + "learning_rate": 8.250296639054782e-06, + "loss": 1.1115, + "step": 906 + }, + { + "epoch": 1.7412385981757081, + "grad_norm": 0.6544438928109518, + "learning_rate": 8.245201476640943e-06, + "loss": 1.3512, + "step": 907 + }, + { + "epoch": 1.743158905424868, + "grad_norm": 0.5929397616630802, + "learning_rate": 8.240100484796581e-06, + "loss": 1.3889, + "step": 908 + }, + { + "epoch": 1.7450792126740278, + "grad_norm": 0.7204827062155562, + "learning_rate": 8.234993672684724e-06, + "loss": 1.2227, + "step": 909 + }, + { + "epoch": 1.7469995199231878, + "grad_norm": 0.6671792020030981, + "learning_rate": 8.229881049478859e-06, + "loss": 1.3579, + "step": 910 + }, + { + "epoch": 1.7489198271723476, + "grad_norm": 0.6522101763399759, + "learning_rate": 8.224762624362909e-06, + "loss": 1.4075, + "step": 911 + }, + { + "epoch": 1.7508401344215074, + "grad_norm": 0.6373133273160264, + "learning_rate": 8.21963840653122e-06, + "loss": 1.2467, + "step": 912 + }, + { + "epoch": 1.7527604416706675, + "grad_norm": 0.6100626574590131, + "learning_rate": 8.214508405188543e-06, + "loss": 1.3204, + "step": 913 + }, + { + "epoch": 1.754680748919827, + "grad_norm": 0.7382960675500586, + "learning_rate": 8.209372629550018e-06, + "loss": 1.3108, + "step": 914 + }, + { + "epoch": 1.756601056168987, + "grad_norm": 0.6147124939284418, + "learning_rate": 8.204231088841156e-06, + "loss": 1.301, + "step": 915 + }, + { + "epoch": 1.758521363418147, + "grad_norm": 0.6584981237032568, + "learning_rate": 8.199083792297828e-06, + "loss": 1.2281, + "step": 916 + }, + { + "epoch": 1.7604416706673067, + "grad_norm": 0.6195212578955458, + "learning_rate": 8.193930749166239e-06, + "loss": 1.3111, + "step": 917 + }, + { + "epoch": 1.7623619779164668, + "grad_norm": 0.6466090662113759, + "learning_rate": 8.188771968702924e-06, + "loss": 1.1323, + "step": 918 + }, + { + "epoch": 1.7642822851656264, + "grad_norm": 0.7163500412541859, + "learning_rate": 8.183607460174716e-06, + "loss": 1.2133, + "step": 919 + }, + { + "epoch": 1.7662025924147864, + "grad_norm": 0.6355256943730633, + "learning_rate": 8.178437232858743e-06, + "loss": 1.2225, + "step": 920 + }, + { + "epoch": 1.7681228996639462, + "grad_norm": 0.6133836361205134, + "learning_rate": 8.173261296042401e-06, + "loss": 1.2216, + "step": 921 + }, + { + "epoch": 1.770043206913106, + "grad_norm": 0.6259716243382641, + "learning_rate": 8.168079659023349e-06, + "loss": 1.1422, + "step": 922 + }, + { + "epoch": 1.771963514162266, + "grad_norm": 0.6770902691471423, + "learning_rate": 8.162892331109483e-06, + "loss": 1.321, + "step": 923 + }, + { + "epoch": 1.7738838214114259, + "grad_norm": 0.637568102850358, + "learning_rate": 8.157699321618912e-06, + "loss": 1.1804, + "step": 924 + }, + { + "epoch": 1.7758041286605857, + "grad_norm": 0.7382626387455697, + "learning_rate": 8.152500639879968e-06, + "loss": 1.4325, + "step": 925 + }, + { + "epoch": 1.7777244359097457, + "grad_norm": 0.6818352440888269, + "learning_rate": 8.147296295231158e-06, + "loss": 1.2576, + "step": 926 + }, + { + "epoch": 1.7796447431589053, + "grad_norm": 0.7236454601947588, + "learning_rate": 8.14208629702117e-06, + "loss": 1.2064, + "step": 927 + }, + { + "epoch": 1.7815650504080653, + "grad_norm": 0.6339212840273679, + "learning_rate": 8.136870654608842e-06, + "loss": 1.3321, + "step": 928 + }, + { + "epoch": 1.7834853576572252, + "grad_norm": 0.6741918786895994, + 
"learning_rate": 8.131649377363154e-06, + "loss": 1.2423, + "step": 929 + }, + { + "epoch": 1.785405664906385, + "grad_norm": 0.7124925197818013, + "learning_rate": 8.126422474663205e-06, + "loss": 1.2692, + "step": 930 + }, + { + "epoch": 1.787325972155545, + "grad_norm": 0.6741223242032481, + "learning_rate": 8.121189955898209e-06, + "loss": 1.2554, + "step": 931 + }, + { + "epoch": 1.7892462794047046, + "grad_norm": 0.6802865384435456, + "learning_rate": 8.11595183046745e-06, + "loss": 1.3452, + "step": 932 + }, + { + "epoch": 1.7911665866538646, + "grad_norm": 0.7320772893126046, + "learning_rate": 8.1107081077803e-06, + "loss": 1.3645, + "step": 933 + }, + { + "epoch": 1.7930868939030244, + "grad_norm": 0.7052645982217766, + "learning_rate": 8.105458797256178e-06, + "loss": 1.1873, + "step": 934 + }, + { + "epoch": 1.7950072011521843, + "grad_norm": 0.6249372982539962, + "learning_rate": 8.100203908324545e-06, + "loss": 1.2649, + "step": 935 + }, + { + "epoch": 1.7969275084013443, + "grad_norm": 0.6873208027555705, + "learning_rate": 8.094943450424874e-06, + "loss": 1.3771, + "step": 936 + }, + { + "epoch": 1.7988478156505041, + "grad_norm": 0.6238917875853156, + "learning_rate": 8.089677433006651e-06, + "loss": 1.3356, + "step": 937 + }, + { + "epoch": 1.800768122899664, + "grad_norm": 0.7232238085832342, + "learning_rate": 8.084405865529345e-06, + "loss": 1.3674, + "step": 938 + }, + { + "epoch": 1.802688430148824, + "grad_norm": 0.5989499559453365, + "learning_rate": 8.079128757462393e-06, + "loss": 1.2039, + "step": 939 + }, + { + "epoch": 1.8046087373979836, + "grad_norm": 0.5909026058023332, + "learning_rate": 8.07384611828519e-06, + "loss": 1.0623, + "step": 940 + }, + { + "epoch": 1.8065290446471436, + "grad_norm": 0.7105011473328626, + "learning_rate": 8.068557957487058e-06, + "loss": 1.2987, + "step": 941 + }, + { + "epoch": 1.8084493518963034, + "grad_norm": 0.6532626538849133, + "learning_rate": 8.063264284567245e-06, + "loss": 1.3902, + "step": 942 + }, + { + "epoch": 1.8103696591454632, + "grad_norm": 0.6938980954824397, + "learning_rate": 8.057965109034898e-06, + "loss": 1.4046, + "step": 943 + }, + { + "epoch": 1.8122899663946233, + "grad_norm": 0.6873736259677121, + "learning_rate": 8.052660440409049e-06, + "loss": 1.3805, + "step": 944 + }, + { + "epoch": 1.8142102736437828, + "grad_norm": 0.696256394032348, + "learning_rate": 8.047350288218597e-06, + "loss": 1.2767, + "step": 945 + }, + { + "epoch": 1.8161305808929429, + "grad_norm": 0.682639831162432, + "learning_rate": 8.042034662002291e-06, + "loss": 1.3496, + "step": 946 + }, + { + "epoch": 1.8180508881421027, + "grad_norm": 0.5954020484262338, + "learning_rate": 8.036713571308711e-06, + "loss": 1.3287, + "step": 947 + }, + { + "epoch": 1.8199711953912625, + "grad_norm": 0.594228242920655, + "learning_rate": 8.031387025696262e-06, + "loss": 1.2077, + "step": 948 + }, + { + "epoch": 1.8218915026404225, + "grad_norm": 0.6239403127487786, + "learning_rate": 8.026055034733136e-06, + "loss": 1.1723, + "step": 949 + }, + { + "epoch": 1.8238118098895824, + "grad_norm": 0.8472497042145998, + "learning_rate": 8.020717607997311e-06, + "loss": 1.2776, + "step": 950 + }, + { + "epoch": 1.8257321171387422, + "grad_norm": 0.7073477206436827, + "learning_rate": 8.015374755076533e-06, + "loss": 1.2585, + "step": 951 + }, + { + "epoch": 1.8276524243879022, + "grad_norm": 0.6337785378456356, + "learning_rate": 8.010026485568292e-06, + "loss": 1.1437, + "step": 952 + }, + { + "epoch": 1.8295727316370618, + "grad_norm": 
0.6413104778228611, + "learning_rate": 8.004672809079808e-06, + "loss": 1.3064, + "step": 953 + }, + { + "epoch": 1.8314930388862218, + "grad_norm": 0.6969466642249281, + "learning_rate": 7.999313735228012e-06, + "loss": 1.1853, + "step": 954 + }, + { + "epoch": 1.8334133461353816, + "grad_norm": 0.617228891044283, + "learning_rate": 7.993949273639535e-06, + "loss": 1.3008, + "step": 955 + }, + { + "epoch": 1.8353336533845415, + "grad_norm": 0.5988865953262722, + "learning_rate": 7.988579433950682e-06, + "loss": 1.1226, + "step": 956 + }, + { + "epoch": 1.8372539606337015, + "grad_norm": 0.6387895790199921, + "learning_rate": 7.983204225807421e-06, + "loss": 1.2028, + "step": 957 + }, + { + "epoch": 1.839174267882861, + "grad_norm": 0.6697886686110965, + "learning_rate": 7.977823658865364e-06, + "loss": 1.1313, + "step": 958 + }, + { + "epoch": 1.8410945751320211, + "grad_norm": 0.6747989966586785, + "learning_rate": 7.972437742789746e-06, + "loss": 1.2433, + "step": 959 + }, + { + "epoch": 1.843014882381181, + "grad_norm": 0.6640266478824588, + "learning_rate": 7.967046487255412e-06, + "loss": 1.2806, + "step": 960 + }, + { + "epoch": 1.8449351896303408, + "grad_norm": 0.6268462710779998, + "learning_rate": 7.9616499019468e-06, + "loss": 1.3466, + "step": 961 + }, + { + "epoch": 1.8468554968795008, + "grad_norm": 0.6891610903475727, + "learning_rate": 7.956247996557924e-06, + "loss": 1.2218, + "step": 962 + }, + { + "epoch": 1.8487758041286606, + "grad_norm": 0.6382707748643591, + "learning_rate": 7.950840780792348e-06, + "loss": 1.4222, + "step": 963 + }, + { + "epoch": 1.8506961113778204, + "grad_norm": 0.597235927390851, + "learning_rate": 7.94542826436318e-06, + "loss": 1.3666, + "step": 964 + }, + { + "epoch": 1.8526164186269805, + "grad_norm": 0.5958380189432244, + "learning_rate": 7.940010456993048e-06, + "loss": 1.2513, + "step": 965 + }, + { + "epoch": 1.85453672587614, + "grad_norm": 0.8207312265214936, + "learning_rate": 7.934587368414085e-06, + "loss": 1.2384, + "step": 966 + }, + { + "epoch": 1.8564570331253, + "grad_norm": 0.6383139862183063, + "learning_rate": 7.929159008367913e-06, + "loss": 1.3282, + "step": 967 + }, + { + "epoch": 1.85837734037446, + "grad_norm": 0.6473414488061252, + "learning_rate": 7.923725386605617e-06, + "loss": 1.369, + "step": 968 + }, + { + "epoch": 1.8602976476236197, + "grad_norm": 0.6461249289533425, + "learning_rate": 7.918286512887738e-06, + "loss": 1.3708, + "step": 969 + }, + { + "epoch": 1.8622179548727797, + "grad_norm": 0.6996074686318671, + "learning_rate": 7.912842396984256e-06, + "loss": 1.301, + "step": 970 + }, + { + "epoch": 1.8641382621219396, + "grad_norm": 0.747195100156215, + "learning_rate": 7.907393048674554e-06, + "loss": 1.3042, + "step": 971 + }, + { + "epoch": 1.8660585693710994, + "grad_norm": 0.6487597764927422, + "learning_rate": 7.901938477747428e-06, + "loss": 1.3622, + "step": 972 + }, + { + "epoch": 1.8679788766202592, + "grad_norm": 0.6311909241077927, + "learning_rate": 7.89647869400105e-06, + "loss": 1.2718, + "step": 973 + }, + { + "epoch": 1.869899183869419, + "grad_norm": 0.8165044894363437, + "learning_rate": 7.891013707242953e-06, + "loss": 1.3604, + "step": 974 + }, + { + "epoch": 1.871819491118579, + "grad_norm": 0.6283536050272592, + "learning_rate": 7.885543527290023e-06, + "loss": 1.3357, + "step": 975 + }, + { + "epoch": 1.8737397983677389, + "grad_norm": 0.6286945845306801, + "learning_rate": 7.880068163968467e-06, + "loss": 1.3898, + "step": 976 + }, + { + "epoch": 1.8756601056168987, + 
"grad_norm": 0.7789702794360976, + "learning_rate": 7.874587627113809e-06, + "loss": 1.3092, + "step": 977 + }, + { + "epoch": 1.8775804128660587, + "grad_norm": 0.8489003200129025, + "learning_rate": 7.869101926570864e-06, + "loss": 1.2689, + "step": 978 + }, + { + "epoch": 1.8795007201152183, + "grad_norm": 0.7248282650889785, + "learning_rate": 7.863611072193721e-06, + "loss": 1.1416, + "step": 979 + }, + { + "epoch": 1.8814210273643783, + "grad_norm": 0.6171058372697622, + "learning_rate": 7.858115073845733e-06, + "loss": 1.2812, + "step": 980 + }, + { + "epoch": 1.8833413346135381, + "grad_norm": 0.6593534507567731, + "learning_rate": 7.852613941399487e-06, + "loss": 1.1924, + "step": 981 + }, + { + "epoch": 1.885261641862698, + "grad_norm": 0.7799530933319198, + "learning_rate": 7.847107684736792e-06, + "loss": 1.313, + "step": 982 + }, + { + "epoch": 1.887181949111858, + "grad_norm": 0.6386650101481485, + "learning_rate": 7.841596313748668e-06, + "loss": 1.291, + "step": 983 + }, + { + "epoch": 1.8891022563610178, + "grad_norm": 0.6172368641032806, + "learning_rate": 7.836079838335317e-06, + "loss": 1.1656, + "step": 984 + }, + { + "epoch": 1.8910225636101776, + "grad_norm": 0.6451182909345482, + "learning_rate": 7.830558268406109e-06, + "loss": 1.2536, + "step": 985 + }, + { + "epoch": 1.8929428708593377, + "grad_norm": 0.7371994656615729, + "learning_rate": 7.825031613879572e-06, + "loss": 1.2996, + "step": 986 + }, + { + "epoch": 1.8948631781084972, + "grad_norm": 0.6712446179245939, + "learning_rate": 7.81949988468336e-06, + "loss": 1.2534, + "step": 987 + }, + { + "epoch": 1.8967834853576573, + "grad_norm": 0.6169395012762885, + "learning_rate": 7.813963090754248e-06, + "loss": 1.2692, + "step": 988 + }, + { + "epoch": 1.898703792606817, + "grad_norm": 0.7540727018335017, + "learning_rate": 7.808421242038107e-06, + "loss": 1.4006, + "step": 989 + }, + { + "epoch": 1.900624099855977, + "grad_norm": 0.6610596828013748, + "learning_rate": 7.802874348489887e-06, + "loss": 1.2828, + "step": 990 + }, + { + "epoch": 1.902544407105137, + "grad_norm": 0.5961190818156942, + "learning_rate": 7.797322420073602e-06, + "loss": 1.2956, + "step": 991 + }, + { + "epoch": 1.9044647143542965, + "grad_norm": 0.6595286029545728, + "learning_rate": 7.791765466762308e-06, + "loss": 1.4421, + "step": 992 + }, + { + "epoch": 1.9063850216034566, + "grad_norm": 0.6296949876578616, + "learning_rate": 7.786203498538094e-06, + "loss": 1.352, + "step": 993 + }, + { + "epoch": 1.9083053288526164, + "grad_norm": 0.6073618042792093, + "learning_rate": 7.780636525392047e-06, + "loss": 1.2247, + "step": 994 + }, + { + "epoch": 1.9102256361017762, + "grad_norm": 0.6617452539336137, + "learning_rate": 7.775064557324251e-06, + "loss": 1.2457, + "step": 995 + }, + { + "epoch": 1.9121459433509362, + "grad_norm": 0.6536089335974361, + "learning_rate": 7.769487604343761e-06, + "loss": 1.0969, + "step": 996 + }, + { + "epoch": 1.914066250600096, + "grad_norm": 0.6568241606553313, + "learning_rate": 7.76390567646859e-06, + "loss": 1.2838, + "step": 997 + }, + { + "epoch": 1.9159865578492559, + "grad_norm": 0.6286384085429085, + "learning_rate": 7.758318783725678e-06, + "loss": 1.1126, + "step": 998 + }, + { + "epoch": 1.917906865098416, + "grad_norm": 0.6059610606241428, + "learning_rate": 7.752726936150895e-06, + "loss": 1.3271, + "step": 999 + }, + { + "epoch": 1.9198271723475755, + "grad_norm": 0.6494214620118615, + "learning_rate": 7.747130143789006e-06, + "loss": 1.4887, + "step": 1000 + }, + { + "epoch": 
1.9217474795967355, + "grad_norm": 0.6087877694738525, + "learning_rate": 7.741528416693656e-06, + "loss": 1.2577, + "step": 1001 + }, + { + "epoch": 1.9236677868458953, + "grad_norm": 0.6037766820835285, + "learning_rate": 7.73592176492736e-06, + "loss": 1.3029, + "step": 1002 + }, + { + "epoch": 1.9255880940950552, + "grad_norm": 0.642573465220044, + "learning_rate": 7.73031019856147e-06, + "loss": 1.3448, + "step": 1003 + }, + { + "epoch": 1.9275084013442152, + "grad_norm": 0.7197151370634155, + "learning_rate": 7.724693727676181e-06, + "loss": 1.2893, + "step": 1004 + }, + { + "epoch": 1.9294287085933748, + "grad_norm": 0.7163014753258418, + "learning_rate": 7.719072362360482e-06, + "loss": 1.2781, + "step": 1005 + }, + { + "epoch": 1.9313490158425348, + "grad_norm": 0.6339725691672796, + "learning_rate": 7.71344611271217e-06, + "loss": 1.2689, + "step": 1006 + }, + { + "epoch": 1.9332693230916946, + "grad_norm": 0.6255558475512056, + "learning_rate": 7.707814988837798e-06, + "loss": 1.405, + "step": 1007 + }, + { + "epoch": 1.9351896303408544, + "grad_norm": 0.7454922422525622, + "learning_rate": 7.702179000852693e-06, + "loss": 1.4645, + "step": 1008 + }, + { + "epoch": 1.9371099375900145, + "grad_norm": 0.6652688810974042, + "learning_rate": 7.696538158880905e-06, + "loss": 1.2935, + "step": 1009 + }, + { + "epoch": 1.9390302448391743, + "grad_norm": 0.7472100254077061, + "learning_rate": 7.69089247305521e-06, + "loss": 1.1844, + "step": 1010 + }, + { + "epoch": 1.9409505520883341, + "grad_norm": 0.6281178136625541, + "learning_rate": 7.685241953517084e-06, + "loss": 1.2398, + "step": 1011 + }, + { + "epoch": 1.9428708593374941, + "grad_norm": 0.7255326505427592, + "learning_rate": 7.679586610416689e-06, + "loss": 1.169, + "step": 1012 + }, + { + "epoch": 1.9447911665866537, + "grad_norm": 0.6110290026538456, + "learning_rate": 7.673926453912846e-06, + "loss": 1.2312, + "step": 1013 + }, + { + "epoch": 1.9467114738358138, + "grad_norm": 0.5927911874653067, + "learning_rate": 7.668261494173024e-06, + "loss": 1.4202, + "step": 1014 + }, + { + "epoch": 1.9486317810849736, + "grad_norm": 0.8083622338160641, + "learning_rate": 7.662591741373324e-06, + "loss": 1.1196, + "step": 1015 + }, + { + "epoch": 1.9505520883341334, + "grad_norm": 0.5787538666088754, + "learning_rate": 7.656917205698452e-06, + "loss": 1.185, + "step": 1016 + }, + { + "epoch": 1.9524723955832934, + "grad_norm": 0.5986617852156635, + "learning_rate": 7.65123789734171e-06, + "loss": 1.3589, + "step": 1017 + }, + { + "epoch": 1.954392702832453, + "grad_norm": 0.7092631074096405, + "learning_rate": 7.64555382650497e-06, + "loss": 1.447, + "step": 1018 + }, + { + "epoch": 1.956313010081613, + "grad_norm": 0.6945189144617475, + "learning_rate": 7.639865003398659e-06, + "loss": 1.38, + "step": 1019 + }, + { + "epoch": 1.9582333173307729, + "grad_norm": 0.7154207337450551, + "learning_rate": 7.634171438241745e-06, + "loss": 1.0459, + "step": 1020 + }, + { + "epoch": 1.9601536245799327, + "grad_norm": 0.5671478613388374, + "learning_rate": 7.628473141261704e-06, + "loss": 1.2583, + "step": 1021 + }, + { + "epoch": 1.9620739318290927, + "grad_norm": 0.674217778263447, + "learning_rate": 7.622770122694526e-06, + "loss": 1.2944, + "step": 1022 + }, + { + "epoch": 1.9639942390782525, + "grad_norm": 0.6035487994586758, + "learning_rate": 7.617062392784672e-06, + "loss": 1.2888, + "step": 1023 + }, + { + "epoch": 1.9659145463274124, + "grad_norm": 0.715233119055766, + "learning_rate": 7.61134996178507e-06, + "loss": 1.4473, + 
"step": 1024 + }, + { + "epoch": 1.9678348535765724, + "grad_norm": 0.6781200282810221, + "learning_rate": 7.605632839957091e-06, + "loss": 1.4216, + "step": 1025 + }, + { + "epoch": 1.969755160825732, + "grad_norm": 0.7348434559650923, + "learning_rate": 7.599911037570533e-06, + "loss": 1.296, + "step": 1026 + }, + { + "epoch": 1.971675468074892, + "grad_norm": 0.7174502974503351, + "learning_rate": 7.594184564903605e-06, + "loss": 1.4189, + "step": 1027 + }, + { + "epoch": 1.9735957753240518, + "grad_norm": 0.6554843561695753, + "learning_rate": 7.588453432242899e-06, + "loss": 1.1638, + "step": 1028 + }, + { + "epoch": 1.9755160825732117, + "grad_norm": 0.6193717120764839, + "learning_rate": 7.5827176498833844e-06, + "loss": 1.2138, + "step": 1029 + }, + { + "epoch": 1.9774363898223717, + "grad_norm": 0.6230337020055144, + "learning_rate": 7.576977228128377e-06, + "loss": 1.496, + "step": 1030 + }, + { + "epoch": 1.9793566970715313, + "grad_norm": 0.7516886536978838, + "learning_rate": 7.57123217728953e-06, + "loss": 1.3157, + "step": 1031 + }, + { + "epoch": 1.9812770043206913, + "grad_norm": 0.7082303577635574, + "learning_rate": 7.5654825076868124e-06, + "loss": 1.1099, + "step": 1032 + }, + { + "epoch": 1.9831973115698511, + "grad_norm": 0.630636438554523, + "learning_rate": 7.559728229648489e-06, + "loss": 1.3774, + "step": 1033 + }, + { + "epoch": 1.985117618819011, + "grad_norm": 0.6242705516766209, + "learning_rate": 7.553969353511099e-06, + "loss": 1.2796, + "step": 1034 + }, + { + "epoch": 1.987037926068171, + "grad_norm": 0.6929200704156835, + "learning_rate": 7.5482058896194476e-06, + "loss": 1.1909, + "step": 1035 + }, + { + "epoch": 1.9889582333173308, + "grad_norm": 0.674947851456176, + "learning_rate": 7.5424378483265795e-06, + "loss": 1.2089, + "step": 1036 + }, + { + "epoch": 1.9908785405664906, + "grad_norm": 0.6655740470860053, + "learning_rate": 7.536665239993759e-06, + "loss": 1.3069, + "step": 1037 + }, + { + "epoch": 1.9927988478156506, + "grad_norm": 0.7234506954218087, + "learning_rate": 7.5308880749904576e-06, + "loss": 1.3131, + "step": 1038 + }, + { + "epoch": 1.9947191550648102, + "grad_norm": 0.6336720269563768, + "learning_rate": 7.525106363694328e-06, + "loss": 1.2551, + "step": 1039 + }, + { + "epoch": 1.9966394623139703, + "grad_norm": 0.6654745873292405, + "learning_rate": 7.519320116491195e-06, + "loss": 1.2598, + "step": 1040 + }, + { + "epoch": 1.99855976956313, + "grad_norm": 0.6416056547121506, + "learning_rate": 7.513529343775025e-06, + "loss": 1.3895, + "step": 1041 + }, + { + "epoch": 2.0, + "grad_norm": 0.7381453276807621, + "learning_rate": 7.50773405594792e-06, + "loss": 1.1004, + "step": 1042 + }, + { + "epoch": 2.00192030724916, + "grad_norm": 0.6984585189454183, + "learning_rate": 7.50193426342009e-06, + "loss": 1.2338, + "step": 1043 + }, + { + "epoch": 2.0038406144983196, + "grad_norm": 0.6860005251072618, + "learning_rate": 7.496129976609833e-06, + "loss": 1.1611, + "step": 1044 + }, + { + "epoch": 2.0057609217474797, + "grad_norm": 0.6710341040166462, + "learning_rate": 7.490321205943526e-06, + "loss": 1.1453, + "step": 1045 + }, + { + "epoch": 2.0076812289966393, + "grad_norm": 0.6881057248251186, + "learning_rate": 7.484507961855599e-06, + "loss": 1.3585, + "step": 1046 + }, + { + "epoch": 2.0096015362457993, + "grad_norm": 0.7149692590237099, + "learning_rate": 7.478690254788515e-06, + "loss": 1.3108, + "step": 1047 + }, + { + "epoch": 2.0115218434949593, + "grad_norm": 0.5040575852249538, + "learning_rate": 7.472868095192758e-06, 
+ "loss": 1.2666, + "step": 1048 + }, + { + "epoch": 2.013442150744119, + "grad_norm": 0.6668343259353099, + "learning_rate": 7.467041493526806e-06, + "loss": 1.3017, + "step": 1049 + }, + { + "epoch": 2.015362457993279, + "grad_norm": 0.6213130267441712, + "learning_rate": 7.46121046025712e-06, + "loss": 1.3463, + "step": 1050 + }, + { + "epoch": 2.017282765242439, + "grad_norm": 0.5465921578009497, + "learning_rate": 7.4553750058581186e-06, + "loss": 1.1172, + "step": 1051 + }, + { + "epoch": 2.0192030724915986, + "grad_norm": 0.7058326054289525, + "learning_rate": 7.449535140812164e-06, + "loss": 1.0875, + "step": 1052 + }, + { + "epoch": 2.0211233797407586, + "grad_norm": 0.6224285545020704, + "learning_rate": 7.443690875609543e-06, + "loss": 1.2569, + "step": 1053 + }, + { + "epoch": 2.023043686989918, + "grad_norm": 0.5891570753306204, + "learning_rate": 7.437842220748441e-06, + "loss": 1.2506, + "step": 1054 + }, + { + "epoch": 2.0249639942390782, + "grad_norm": 0.7187156173332934, + "learning_rate": 7.431989186734935e-06, + "loss": 1.1866, + "step": 1055 + }, + { + "epoch": 2.0268843014882383, + "grad_norm": 0.656805950152783, + "learning_rate": 7.4261317840829635e-06, + "loss": 1.2794, + "step": 1056 + }, + { + "epoch": 2.028804608737398, + "grad_norm": 0.6565140993598239, + "learning_rate": 7.420270023314315e-06, + "loss": 1.4964, + "step": 1057 + }, + { + "epoch": 2.030724915986558, + "grad_norm": 0.6674673642468827, + "learning_rate": 7.414403914958607e-06, + "loss": 1.1226, + "step": 1058 + }, + { + "epoch": 2.0326452232357175, + "grad_norm": 0.6383574520864321, + "learning_rate": 7.408533469553264e-06, + "loss": 1.276, + "step": 1059 + }, + { + "epoch": 2.0345655304848775, + "grad_norm": 0.7154715475803043, + "learning_rate": 7.402658697643504e-06, + "loss": 1.2388, + "step": 1060 + }, + { + "epoch": 2.0364858377340376, + "grad_norm": 0.5223568124972519, + "learning_rate": 7.396779609782316e-06, + "loss": 1.2159, + "step": 1061 + }, + { + "epoch": 2.038406144983197, + "grad_norm": 0.5739489933046834, + "learning_rate": 7.390896216530442e-06, + "loss": 1.2659, + "step": 1062 + }, + { + "epoch": 2.040326452232357, + "grad_norm": 0.6737444843346947, + "learning_rate": 7.385008528456357e-06, + "loss": 1.3207, + "step": 1063 + }, + { + "epoch": 2.0422467594815172, + "grad_norm": 0.5760491490696834, + "learning_rate": 7.379116556136251e-06, + "loss": 1.2992, + "step": 1064 + }, + { + "epoch": 2.044167066730677, + "grad_norm": 0.656419511670537, + "learning_rate": 7.373220310154008e-06, + "loss": 1.2658, + "step": 1065 + }, + { + "epoch": 2.046087373979837, + "grad_norm": 0.6150154633631958, + "learning_rate": 7.367319801101196e-06, + "loss": 1.3401, + "step": 1066 + }, + { + "epoch": 2.0480076812289965, + "grad_norm": 1.021222823586059, + "learning_rate": 7.361415039577033e-06, + "loss": 1.2002, + "step": 1067 + }, + { + "epoch": 2.0499279884781565, + "grad_norm": 0.5854320877750915, + "learning_rate": 7.355506036188379e-06, + "loss": 1.1323, + "step": 1068 + }, + { + "epoch": 2.0518482957273165, + "grad_norm": 0.7216867130918194, + "learning_rate": 7.349592801549715e-06, + "loss": 1.2601, + "step": 1069 + }, + { + "epoch": 2.053768602976476, + "grad_norm": 0.6194054541902049, + "learning_rate": 7.343675346283118e-06, + "loss": 1.2808, + "step": 1070 + }, + { + "epoch": 2.055688910225636, + "grad_norm": 0.9039301133482249, + "learning_rate": 7.337753681018251e-06, + "loss": 1.2842, + "step": 1071 + }, + { + "epoch": 2.0576092174747957, + "grad_norm": 0.5876501457594527, + 
"learning_rate": 7.331827816392341e-06, + "loss": 1.221, + "step": 1072 + }, + { + "epoch": 2.059529524723956, + "grad_norm": 0.6740935986291116, + "learning_rate": 7.325897763050155e-06, + "loss": 1.267, + "step": 1073 + }, + { + "epoch": 2.061449831973116, + "grad_norm": 0.6337403720742513, + "learning_rate": 7.319963531643983e-06, + "loss": 1.2348, + "step": 1074 + }, + { + "epoch": 2.0633701392222754, + "grad_norm": 0.6354665055103681, + "learning_rate": 7.3140251328336234e-06, + "loss": 1.2987, + "step": 1075 + }, + { + "epoch": 2.0652904464714354, + "grad_norm": 0.5933193811451509, + "learning_rate": 7.308082577286359e-06, + "loss": 1.2048, + "step": 1076 + }, + { + "epoch": 2.0672107537205955, + "grad_norm": 0.7623792540828787, + "learning_rate": 7.3021358756769425e-06, + "loss": 1.2341, + "step": 1077 + }, + { + "epoch": 2.069131060969755, + "grad_norm": 0.6015392053110737, + "learning_rate": 7.296185038687566e-06, + "loss": 1.2542, + "step": 1078 + }, + { + "epoch": 2.071051368218915, + "grad_norm": 0.6955251812505019, + "learning_rate": 7.290230077007863e-06, + "loss": 1.1749, + "step": 1079 + }, + { + "epoch": 2.0729716754680747, + "grad_norm": 0.6087755249687018, + "learning_rate": 7.284271001334862e-06, + "loss": 1.2355, + "step": 1080 + }, + { + "epoch": 2.0748919827172347, + "grad_norm": 0.5589403698261416, + "learning_rate": 7.278307822372992e-06, + "loss": 1.2345, + "step": 1081 + }, + { + "epoch": 2.0768122899663948, + "grad_norm": 0.6219252458630142, + "learning_rate": 7.272340550834049e-06, + "loss": 1.2623, + "step": 1082 + }, + { + "epoch": 2.0787325972155544, + "grad_norm": 0.6568214702123812, + "learning_rate": 7.266369197437182e-06, + "loss": 1.2551, + "step": 1083 + }, + { + "epoch": 2.0806529044647144, + "grad_norm": 0.6644980261981873, + "learning_rate": 7.26039377290887e-06, + "loss": 1.2986, + "step": 1084 + }, + { + "epoch": 2.082573211713874, + "grad_norm": 0.652931545403877, + "learning_rate": 7.254414287982907e-06, + "loss": 1.2229, + "step": 1085 + }, + { + "epoch": 2.084493518963034, + "grad_norm": 0.5965090727981581, + "learning_rate": 7.24843075340038e-06, + "loss": 1.187, + "step": 1086 + }, + { + "epoch": 2.086413826212194, + "grad_norm": 0.6636455612362417, + "learning_rate": 7.242443179909649e-06, + "loss": 1.2377, + "step": 1087 + }, + { + "epoch": 2.0883341334613537, + "grad_norm": 0.5706280085675597, + "learning_rate": 7.236451578266334e-06, + "loss": 1.3363, + "step": 1088 + }, + { + "epoch": 2.0902544407105137, + "grad_norm": 0.5452970868510953, + "learning_rate": 7.230455959233284e-06, + "loss": 1.2301, + "step": 1089 + }, + { + "epoch": 2.0921747479596737, + "grad_norm": 0.732832750259065, + "learning_rate": 7.224456333580574e-06, + "loss": 1.2697, + "step": 1090 + }, + { + "epoch": 2.0940950552088333, + "grad_norm": 0.5776781009920395, + "learning_rate": 7.218452712085464e-06, + "loss": 1.1108, + "step": 1091 + }, + { + "epoch": 2.0960153624579934, + "grad_norm": 0.7340170720293495, + "learning_rate": 7.212445105532402e-06, + "loss": 1.4598, + "step": 1092 + }, + { + "epoch": 2.097935669707153, + "grad_norm": 0.5608573831708831, + "learning_rate": 7.206433524712989e-06, + "loss": 1.3114, + "step": 1093 + }, + { + "epoch": 2.099855976956313, + "grad_norm": 0.6123136642885363, + "learning_rate": 7.200417980425969e-06, + "loss": 1.176, + "step": 1094 + }, + { + "epoch": 2.101776284205473, + "grad_norm": 0.6263758820886606, + "learning_rate": 7.1943984834771995e-06, + "loss": 1.3055, + "step": 1095 + }, + { + "epoch": 2.1036965914546326, + 
"grad_norm": 0.6301511028435693, + "learning_rate": 7.188375044679645e-06, + "loss": 1.2084, + "step": 1096 + }, + { + "epoch": 2.1056168987037926, + "grad_norm": 0.5961686912489383, + "learning_rate": 7.182347674853349e-06, + "loss": 1.1919, + "step": 1097 + }, + { + "epoch": 2.1075372059529527, + "grad_norm": 0.6383316351170043, + "learning_rate": 7.176316384825414e-06, + "loss": 1.2242, + "step": 1098 + }, + { + "epoch": 2.1094575132021123, + "grad_norm": 0.6832418991450104, + "learning_rate": 7.170281185429986e-06, + "loss": 1.2969, + "step": 1099 + }, + { + "epoch": 2.1113778204512723, + "grad_norm": 0.7003707945546503, + "learning_rate": 7.164242087508232e-06, + "loss": 1.3452, + "step": 1100 + }, + { + "epoch": 2.113298127700432, + "grad_norm": 0.6524324557488448, + "learning_rate": 7.1581991019083255e-06, + "loss": 1.2907, + "step": 1101 + }, + { + "epoch": 2.115218434949592, + "grad_norm": 0.6346709626357138, + "learning_rate": 7.152152239485419e-06, + "loss": 1.2925, + "step": 1102 + }, + { + "epoch": 2.117138742198752, + "grad_norm": 0.6027369910884686, + "learning_rate": 7.1461015111016365e-06, + "loss": 1.2191, + "step": 1103 + }, + { + "epoch": 2.1190590494479116, + "grad_norm": 0.7317096771295276, + "learning_rate": 7.140046927626034e-06, + "loss": 1.2868, + "step": 1104 + }, + { + "epoch": 2.1209793566970716, + "grad_norm": 0.664283102964385, + "learning_rate": 7.1339884999346065e-06, + "loss": 1.2537, + "step": 1105 + }, + { + "epoch": 2.122899663946231, + "grad_norm": 0.5617222908710969, + "learning_rate": 7.127926238910243e-06, + "loss": 1.203, + "step": 1106 + }, + { + "epoch": 2.1248199711953912, + "grad_norm": 0.6937789147849563, + "learning_rate": 7.121860155442727e-06, + "loss": 1.18, + "step": 1107 + }, + { + "epoch": 2.1267402784445513, + "grad_norm": 0.6090841223763318, + "learning_rate": 7.115790260428704e-06, + "loss": 1.2649, + "step": 1108 + }, + { + "epoch": 2.128660585693711, + "grad_norm": 0.5913292372532395, + "learning_rate": 7.109716564771663e-06, + "loss": 1.3238, + "step": 1109 + }, + { + "epoch": 2.130580892942871, + "grad_norm": 0.6373338530440186, + "learning_rate": 7.103639079381931e-06, + "loss": 1.3145, + "step": 1110 + }, + { + "epoch": 2.132501200192031, + "grad_norm": 0.7356254332331955, + "learning_rate": 7.09755781517663e-06, + "loss": 1.3062, + "step": 1111 + }, + { + "epoch": 2.1344215074411905, + "grad_norm": 0.6354999779911574, + "learning_rate": 7.091472783079677e-06, + "loss": 1.2691, + "step": 1112 + }, + { + "epoch": 2.1363418146903506, + "grad_norm": 0.634333016129095, + "learning_rate": 7.085383994021757e-06, + "loss": 1.2463, + "step": 1113 + }, + { + "epoch": 2.13826212193951, + "grad_norm": 0.628640631103715, + "learning_rate": 7.079291458940302e-06, + "loss": 1.3555, + "step": 1114 + }, + { + "epoch": 2.14018242918867, + "grad_norm": 0.5878511095609504, + "learning_rate": 7.073195188779474e-06, + "loss": 1.2021, + "step": 1115 + }, + { + "epoch": 2.1421027364378302, + "grad_norm": 0.6234889678557616, + "learning_rate": 7.067095194490143e-06, + "loss": 1.2481, + "step": 1116 + }, + { + "epoch": 2.14402304368699, + "grad_norm": 0.5849361230078369, + "learning_rate": 7.060991487029872e-06, + "loss": 1.148, + "step": 1117 + }, + { + "epoch": 2.14594335093615, + "grad_norm": 0.5677121596010603, + "learning_rate": 7.0548840773628915e-06, + "loss": 1.2142, + "step": 1118 + }, + { + "epoch": 2.1478636581853094, + "grad_norm": 0.6134358379808856, + "learning_rate": 7.0487729764600824e-06, + "loss": 1.0703, + "step": 1119 + }, + { + 
"epoch": 2.1497839654344695, + "grad_norm": 0.5806561061052112, + "learning_rate": 7.042658195298956e-06, + "loss": 1.3193, + "step": 1120 + }, + { + "epoch": 2.1517042726836295, + "grad_norm": 0.6169966362309864, + "learning_rate": 7.036539744863636e-06, + "loss": 1.266, + "step": 1121 + }, + { + "epoch": 2.153624579932789, + "grad_norm": 0.6281321250720722, + "learning_rate": 7.030417636144836e-06, + "loss": 1.3127, + "step": 1122 + }, + { + "epoch": 2.155544887181949, + "grad_norm": 0.552552267873573, + "learning_rate": 7.024291880139843e-06, + "loss": 1.1968, + "step": 1123 + }, + { + "epoch": 2.157465194431109, + "grad_norm": 0.7742167483021233, + "learning_rate": 7.018162487852494e-06, + "loss": 1.2514, + "step": 1124 + }, + { + "epoch": 2.1593855016802688, + "grad_norm": 0.6102221138856729, + "learning_rate": 7.012029470293156e-06, + "loss": 1.2888, + "step": 1125 + }, + { + "epoch": 2.161305808929429, + "grad_norm": 0.678988077797767, + "learning_rate": 7.0058928384787115e-06, + "loss": 1.2372, + "step": 1126 + }, + { + "epoch": 2.1632261161785884, + "grad_norm": 0.6737189312331883, + "learning_rate": 6.999752603432534e-06, + "loss": 1.1838, + "step": 1127 + }, + { + "epoch": 2.1651464234277484, + "grad_norm": 0.596052318398853, + "learning_rate": 6.993608776184473e-06, + "loss": 1.2675, + "step": 1128 + }, + { + "epoch": 2.1670667306769085, + "grad_norm": 0.5778382747735666, + "learning_rate": 6.987461367770825e-06, + "loss": 1.1558, + "step": 1129 + }, + { + "epoch": 2.168987037926068, + "grad_norm": 0.7612550327008405, + "learning_rate": 6.9813103892343205e-06, + "loss": 1.1798, + "step": 1130 + }, + { + "epoch": 2.170907345175228, + "grad_norm": 0.6250451986476963, + "learning_rate": 6.975155851624107e-06, + "loss": 1.2171, + "step": 1131 + }, + { + "epoch": 2.172827652424388, + "grad_norm": 0.617676347733231, + "learning_rate": 6.968997765995722e-06, + "loss": 1.2002, + "step": 1132 + }, + { + "epoch": 2.1747479596735477, + "grad_norm": 0.7215322900975272, + "learning_rate": 6.962836143411077e-06, + "loss": 1.1815, + "step": 1133 + }, + { + "epoch": 2.1766682669227078, + "grad_norm": 0.6889525154465694, + "learning_rate": 6.956670994938438e-06, + "loss": 1.2466, + "step": 1134 + }, + { + "epoch": 2.1785885741718674, + "grad_norm": 0.5751161226801118, + "learning_rate": 6.9505023316524024e-06, + "loss": 1.2167, + "step": 1135 + }, + { + "epoch": 2.1805088814210274, + "grad_norm": 0.6513915776200747, + "learning_rate": 6.944330164633886e-06, + "loss": 1.3228, + "step": 1136 + }, + { + "epoch": 2.1824291886701874, + "grad_norm": 0.6003888012093858, + "learning_rate": 6.938154504970092e-06, + "loss": 1.2462, + "step": 1137 + }, + { + "epoch": 2.184349495919347, + "grad_norm": 0.6206783890336667, + "learning_rate": 6.931975363754502e-06, + "loss": 1.291, + "step": 1138 + }, + { + "epoch": 2.186269803168507, + "grad_norm": 0.7554272133869872, + "learning_rate": 6.92579275208685e-06, + "loss": 1.1497, + "step": 1139 + }, + { + "epoch": 2.1881901104176666, + "grad_norm": 0.6236041362503416, + "learning_rate": 6.9196066810731055e-06, + "loss": 1.2785, + "step": 1140 + }, + { + "epoch": 2.1901104176668267, + "grad_norm": 0.5902897052882286, + "learning_rate": 6.913417161825449e-06, + "loss": 1.2064, + "step": 1141 + }, + { + "epoch": 2.1920307249159867, + "grad_norm": 0.6788328193144818, + "learning_rate": 6.90722420546226e-06, + "loss": 1.201, + "step": 1142 + }, + { + "epoch": 2.1939510321651463, + "grad_norm": 0.7102736450080036, + "learning_rate": 6.901027823108088e-06, + "loss": 
1.1767, + "step": 1143 + }, + { + "epoch": 2.1958713394143063, + "grad_norm": 0.6002222918018697, + "learning_rate": 6.894828025893636e-06, + "loss": 1.2062, + "step": 1144 + }, + { + "epoch": 2.1977916466634664, + "grad_norm": 0.5307888776119263, + "learning_rate": 6.888624824955746e-06, + "loss": 1.1376, + "step": 1145 + }, + { + "epoch": 2.199711953912626, + "grad_norm": 0.6991995097343485, + "learning_rate": 6.882418231437371e-06, + "loss": 1.2353, + "step": 1146 + }, + { + "epoch": 2.201632261161786, + "grad_norm": 0.5955303781294875, + "learning_rate": 6.87620825648756e-06, + "loss": 1.14, + "step": 1147 + }, + { + "epoch": 2.2035525684109456, + "grad_norm": 0.6099852257845632, + "learning_rate": 6.869994911261429e-06, + "loss": 1.354, + "step": 1148 + }, + { + "epoch": 2.2054728756601056, + "grad_norm": 0.6184213161381115, + "learning_rate": 6.863778206920161e-06, + "loss": 1.228, + "step": 1149 + }, + { + "epoch": 2.2073931829092657, + "grad_norm": 0.6632479032393596, + "learning_rate": 6.8575581546309614e-06, + "loss": 1.19, + "step": 1150 + }, + { + "epoch": 2.2093134901584253, + "grad_norm": 0.736023733055779, + "learning_rate": 6.851334765567054e-06, + "loss": 1.2977, + "step": 1151 + }, + { + "epoch": 2.2112337974075853, + "grad_norm": 0.5801625495939454, + "learning_rate": 6.8451080509076594e-06, + "loss": 1.3641, + "step": 1152 + }, + { + "epoch": 2.213154104656745, + "grad_norm": 0.5864932647879217, + "learning_rate": 6.838878021837968e-06, + "loss": 1.0784, + "step": 1153 + }, + { + "epoch": 2.215074411905905, + "grad_norm": 0.6816487618025293, + "learning_rate": 6.832644689549124e-06, + "loss": 1.3253, + "step": 1154 + }, + { + "epoch": 2.216994719155065, + "grad_norm": 0.685898447688162, + "learning_rate": 6.826408065238208e-06, + "loss": 1.3172, + "step": 1155 + }, + { + "epoch": 2.2189150264042246, + "grad_norm": 0.6181285906434999, + "learning_rate": 6.820168160108211e-06, + "loss": 1.3543, + "step": 1156 + }, + { + "epoch": 2.2208353336533846, + "grad_norm": 0.6573999090601903, + "learning_rate": 6.813924985368021e-06, + "loss": 1.1342, + "step": 1157 + }, + { + "epoch": 2.2227556409025446, + "grad_norm": 0.6345506761790236, + "learning_rate": 6.807678552232397e-06, + "loss": 1.3285, + "step": 1158 + }, + { + "epoch": 2.224675948151704, + "grad_norm": 0.5910829243317203, + "learning_rate": 6.8014288719219505e-06, + "loss": 1.2601, + "step": 1159 + }, + { + "epoch": 2.2265962554008643, + "grad_norm": 0.5952533335690797, + "learning_rate": 6.795175955663127e-06, + "loss": 1.2067, + "step": 1160 + }, + { + "epoch": 2.228516562650024, + "grad_norm": 0.5720846452241152, + "learning_rate": 6.788919814688183e-06, + "loss": 1.2387, + "step": 1161 + }, + { + "epoch": 2.230436869899184, + "grad_norm": 0.6051635760894251, + "learning_rate": 6.782660460235174e-06, + "loss": 1.198, + "step": 1162 + }, + { + "epoch": 2.232357177148344, + "grad_norm": 0.6808334105131638, + "learning_rate": 6.776397903547919e-06, + "loss": 1.2227, + "step": 1163 + }, + { + "epoch": 2.2342774843975035, + "grad_norm": 0.6930823511105796, + "learning_rate": 6.770132155875994e-06, + "loss": 1.2159, + "step": 1164 + }, + { + "epoch": 2.2361977916466635, + "grad_norm": 0.6416443437211603, + "learning_rate": 6.76386322847471e-06, + "loss": 1.3446, + "step": 1165 + }, + { + "epoch": 2.238118098895823, + "grad_norm": 0.7018085526370589, + "learning_rate": 6.757591132605082e-06, + "loss": 1.3883, + "step": 1166 + }, + { + "epoch": 2.240038406144983, + "grad_norm": 0.5811582821637894, + "learning_rate": 
6.7513158795338245e-06, + "loss": 1.2509, + "step": 1167 + }, + { + "epoch": 2.241958713394143, + "grad_norm": 0.6921861009803321, + "learning_rate": 6.745037480533316e-06, + "loss": 1.2685, + "step": 1168 + }, + { + "epoch": 2.243879020643303, + "grad_norm": 0.6003605885065617, + "learning_rate": 6.738755946881593e-06, + "loss": 1.1512, + "step": 1169 + }, + { + "epoch": 2.245799327892463, + "grad_norm": 0.6330910082110218, + "learning_rate": 6.73247128986232e-06, + "loss": 1.3939, + "step": 1170 + }, + { + "epoch": 2.247719635141623, + "grad_norm": 0.7009718702070067, + "learning_rate": 6.72618352076477e-06, + "loss": 1.3504, + "step": 1171 + }, + { + "epoch": 2.2496399423907825, + "grad_norm": 0.6243298363689899, + "learning_rate": 6.7198926508838095e-06, + "loss": 1.2077, + "step": 1172 + }, + { + "epoch": 2.2515602496399425, + "grad_norm": 0.665060474987669, + "learning_rate": 6.713598691519873e-06, + "loss": 1.2854, + "step": 1173 + }, + { + "epoch": 2.253480556889102, + "grad_norm": 0.6795425121016735, + "learning_rate": 6.707301653978945e-06, + "loss": 1.211, + "step": 1174 + }, + { + "epoch": 2.255400864138262, + "grad_norm": 0.649787483941008, + "learning_rate": 6.701001549572541e-06, + "loss": 1.2618, + "step": 1175 + }, + { + "epoch": 2.257321171387422, + "grad_norm": 0.5530616366843604, + "learning_rate": 6.694698389617684e-06, + "loss": 1.189, + "step": 1176 + }, + { + "epoch": 2.2592414786365818, + "grad_norm": 0.7578591256854189, + "learning_rate": 6.688392185436884e-06, + "loss": 1.1721, + "step": 1177 + }, + { + "epoch": 2.261161785885742, + "grad_norm": 0.681600411779859, + "learning_rate": 6.682082948358125e-06, + "loss": 1.1017, + "step": 1178 + }, + { + "epoch": 2.2630820931349014, + "grad_norm": 0.5753391550196724, + "learning_rate": 6.675770689714832e-06, + "loss": 1.2545, + "step": 1179 + }, + { + "epoch": 2.2650024003840614, + "grad_norm": 0.5926485691603264, + "learning_rate": 6.6694554208458665e-06, + "loss": 1.3251, + "step": 1180 + }, + { + "epoch": 2.2669227076332215, + "grad_norm": 0.7701074371021729, + "learning_rate": 6.663137153095487e-06, + "loss": 1.3517, + "step": 1181 + }, + { + "epoch": 2.268843014882381, + "grad_norm": 0.6037580306390564, + "learning_rate": 6.6568158978133455e-06, + "loss": 1.1961, + "step": 1182 + }, + { + "epoch": 2.270763322131541, + "grad_norm": 0.6242293746700862, + "learning_rate": 6.65049166635446e-06, + "loss": 1.2815, + "step": 1183 + }, + { + "epoch": 2.272683629380701, + "grad_norm": 0.7092181079591579, + "learning_rate": 6.644164470079193e-06, + "loss": 1.259, + "step": 1184 + }, + { + "epoch": 2.2746039366298607, + "grad_norm": 0.7292906796102022, + "learning_rate": 6.637834320353235e-06, + "loss": 1.3405, + "step": 1185 + }, + { + "epoch": 2.2765242438790207, + "grad_norm": 0.5858173584780139, + "learning_rate": 6.63150122854758e-06, + "loss": 1.2043, + "step": 1186 + }, + { + "epoch": 2.2784445511281803, + "grad_norm": 0.6644872468479983, + "learning_rate": 6.625165206038504e-06, + "loss": 1.3984, + "step": 1187 + }, + { + "epoch": 2.2803648583773404, + "grad_norm": 0.6617391253904775, + "learning_rate": 6.6188262642075566e-06, + "loss": 1.2627, + "step": 1188 + }, + { + "epoch": 2.2822851656265004, + "grad_norm": 0.6415220686748404, + "learning_rate": 6.612484414441522e-06, + "loss": 1.2287, + "step": 1189 + }, + { + "epoch": 2.28420547287566, + "grad_norm": 0.6597124470851262, + "learning_rate": 6.606139668132412e-06, + "loss": 1.295, + "step": 1190 + }, + { + "epoch": 2.28612578012482, + "grad_norm": 
0.6003215660383565, + "learning_rate": 6.599792036677444e-06, + "loss": 1.286, + "step": 1191 + }, + { + "epoch": 2.2880460873739796, + "grad_norm": 0.6684003644336577, + "learning_rate": 6.593441531479011e-06, + "loss": 1.1995, + "step": 1192 + }, + { + "epoch": 2.2899663946231397, + "grad_norm": 0.613380897872638, + "learning_rate": 6.587088163944677e-06, + "loss": 1.2017, + "step": 1193 + }, + { + "epoch": 2.2918867018722997, + "grad_norm": 0.6759779333103918, + "learning_rate": 6.5807319454871385e-06, + "loss": 1.3439, + "step": 1194 + }, + { + "epoch": 2.2938070091214593, + "grad_norm": 0.6209490809831877, + "learning_rate": 6.574372887524221e-06, + "loss": 1.4075, + "step": 1195 + }, + { + "epoch": 2.2957273163706193, + "grad_norm": 0.7296562782355499, + "learning_rate": 6.568011001478846e-06, + "loss": 1.376, + "step": 1196 + }, + { + "epoch": 2.2976476236197794, + "grad_norm": 0.7580590870766526, + "learning_rate": 6.561646298779019e-06, + "loss": 1.2304, + "step": 1197 + }, + { + "epoch": 2.299567930868939, + "grad_norm": 0.6166792793488481, + "learning_rate": 6.5552787908578e-06, + "loss": 1.2215, + "step": 1198 + }, + { + "epoch": 2.301488238118099, + "grad_norm": 0.6233271317496348, + "learning_rate": 6.548908489153292e-06, + "loss": 1.2564, + "step": 1199 + }, + { + "epoch": 2.3034085453672586, + "grad_norm": 0.6182453476486088, + "learning_rate": 6.542535405108614e-06, + "loss": 1.0683, + "step": 1200 + }, + { + "epoch": 2.3053288526164186, + "grad_norm": 0.6326303682008909, + "learning_rate": 6.536159550171888e-06, + "loss": 1.2443, + "step": 1201 + }, + { + "epoch": 2.3072491598655787, + "grad_norm": 0.6398707602190346, + "learning_rate": 6.5297809357962064e-06, + "loss": 1.1574, + "step": 1202 + }, + { + "epoch": 2.3091694671147383, + "grad_norm": 0.6852341547792379, + "learning_rate": 6.523399573439621e-06, + "loss": 1.1444, + "step": 1203 + }, + { + "epoch": 2.3110897743638983, + "grad_norm": 0.616233805909782, + "learning_rate": 6.517015474565127e-06, + "loss": 1.2797, + "step": 1204 + }, + { + "epoch": 2.313010081613058, + "grad_norm": 0.5644035791459282, + "learning_rate": 6.51062865064062e-06, + "loss": 1.1528, + "step": 1205 + }, + { + "epoch": 2.314930388862218, + "grad_norm": 0.5470958572312058, + "learning_rate": 6.5042391131389086e-06, + "loss": 1.1724, + "step": 1206 + }, + { + "epoch": 2.316850696111378, + "grad_norm": 0.8369300122733229, + "learning_rate": 6.49784687353766e-06, + "loss": 1.1318, + "step": 1207 + }, + { + "epoch": 2.3187710033605375, + "grad_norm": 0.6176806677274713, + "learning_rate": 6.4914519433194046e-06, + "loss": 1.1819, + "step": 1208 + }, + { + "epoch": 2.3206913106096976, + "grad_norm": 0.70239567156242, + "learning_rate": 6.485054333971505e-06, + "loss": 1.3287, + "step": 1209 + }, + { + "epoch": 2.3226116178588576, + "grad_norm": 0.6849156492178405, + "learning_rate": 6.4786540569861315e-06, + "loss": 1.2436, + "step": 1210 + }, + { + "epoch": 2.324531925108017, + "grad_norm": 0.623250772845043, + "learning_rate": 6.472251123860252e-06, + "loss": 1.0973, + "step": 1211 + }, + { + "epoch": 2.3264522323571772, + "grad_norm": 0.6275812836811542, + "learning_rate": 6.465845546095605e-06, + "loss": 1.1364, + "step": 1212 + }, + { + "epoch": 2.328372539606337, + "grad_norm": 0.6215409724184875, + "learning_rate": 6.459437335198676e-06, + "loss": 1.2874, + "step": 1213 + }, + { + "epoch": 2.330292846855497, + "grad_norm": 0.6379824148538079, + "learning_rate": 6.453026502680683e-06, + "loss": 1.2216, + "step": 1214 + }, + { + "epoch": 
2.332213154104657, + "grad_norm": 0.6794226960110918, + "learning_rate": 6.446613060057551e-06, + "loss": 1.3418, + "step": 1215 + }, + { + "epoch": 2.3341334613538165, + "grad_norm": 0.6520127896780581, + "learning_rate": 6.4401970188499e-06, + "loss": 1.1196, + "step": 1216 + }, + { + "epoch": 2.3360537686029765, + "grad_norm": 0.7316497491112776, + "learning_rate": 6.43377839058301e-06, + "loss": 1.2544, + "step": 1217 + }, + { + "epoch": 2.337974075852136, + "grad_norm": 0.6291690821103928, + "learning_rate": 6.42735718678681e-06, + "loss": 1.2161, + "step": 1218 + }, + { + "epoch": 2.339894383101296, + "grad_norm": 0.7107157838496435, + "learning_rate": 6.420933418995861e-06, + "loss": 1.3376, + "step": 1219 + }, + { + "epoch": 2.341814690350456, + "grad_norm": 0.6661525380725084, + "learning_rate": 6.414507098749324e-06, + "loss": 1.3506, + "step": 1220 + }, + { + "epoch": 2.343734997599616, + "grad_norm": 0.6653555103275538, + "learning_rate": 6.4080782375909455e-06, + "loss": 1.1913, + "step": 1221 + }, + { + "epoch": 2.345655304848776, + "grad_norm": 0.6138861767200757, + "learning_rate": 6.401646847069038e-06, + "loss": 1.2745, + "step": 1222 + }, + { + "epoch": 2.347575612097936, + "grad_norm": 0.6543629399503493, + "learning_rate": 6.39521293873646e-06, + "loss": 1.2057, + "step": 1223 + }, + { + "epoch": 2.3494959193470955, + "grad_norm": 0.5774136334197446, + "learning_rate": 6.388776524150586e-06, + "loss": 1.174, + "step": 1224 + }, + { + "epoch": 2.3514162265962555, + "grad_norm": 0.5859347161113456, + "learning_rate": 6.3823376148733e-06, + "loss": 1.2425, + "step": 1225 + }, + { + "epoch": 2.353336533845415, + "grad_norm": 0.6408466768455149, + "learning_rate": 6.375896222470961e-06, + "loss": 1.1288, + "step": 1226 + }, + { + "epoch": 2.355256841094575, + "grad_norm": 0.6146169617139874, + "learning_rate": 6.369452358514392e-06, + "loss": 1.2214, + "step": 1227 + }, + { + "epoch": 2.357177148343735, + "grad_norm": 0.5819190647223798, + "learning_rate": 6.363006034578856e-06, + "loss": 1.2009, + "step": 1228 + }, + { + "epoch": 2.3590974555928947, + "grad_norm": 0.6030278958480552, + "learning_rate": 6.356557262244033e-06, + "loss": 1.165, + "step": 1229 + }, + { + "epoch": 2.361017762842055, + "grad_norm": 0.6491642246517341, + "learning_rate": 6.350106053094004e-06, + "loss": 1.1608, + "step": 1230 + }, + { + "epoch": 2.3629380700912144, + "grad_norm": 0.6463817243208629, + "learning_rate": 6.34365241871722e-06, + "loss": 1.3024, + "step": 1231 + }, + { + "epoch": 2.3648583773403744, + "grad_norm": 0.7103506380741759, + "learning_rate": 6.3371963707065e-06, + "loss": 1.382, + "step": 1232 + }, + { + "epoch": 2.3667786845895344, + "grad_norm": 0.6785469941350024, + "learning_rate": 6.330737920658989e-06, + "loss": 1.2195, + "step": 1233 + }, + { + "epoch": 2.368698991838694, + "grad_norm": 0.5570736910667738, + "learning_rate": 6.324277080176151e-06, + "loss": 1.2398, + "step": 1234 + }, + { + "epoch": 2.370619299087854, + "grad_norm": 0.6701770195276846, + "learning_rate": 6.317813860863743e-06, + "loss": 1.093, + "step": 1235 + }, + { + "epoch": 2.372539606337014, + "grad_norm": 0.6046075353253518, + "learning_rate": 6.311348274331797e-06, + "loss": 1.325, + "step": 1236 + }, + { + "epoch": 2.3744599135861737, + "grad_norm": 0.594000111250696, + "learning_rate": 6.304880332194593e-06, + "loss": 1.2857, + "step": 1237 + }, + { + "epoch": 2.3763802208353337, + "grad_norm": 0.6346049535343968, + "learning_rate": 6.2984100460706476e-06, + "loss": 1.2374, + "step": 1238 + 
}, + { + "epoch": 2.3783005280844933, + "grad_norm": 0.5748289888747085, + "learning_rate": 6.2919374275826835e-06, + "loss": 1.3284, + "step": 1239 + }, + { + "epoch": 2.3802208353336534, + "grad_norm": 0.6113265562594793, + "learning_rate": 6.285462488357618e-06, + "loss": 1.1277, + "step": 1240 + }, + { + "epoch": 2.3821411425828134, + "grad_norm": 0.6292319853626924, + "learning_rate": 6.278985240026532e-06, + "loss": 1.1987, + "step": 1241 + }, + { + "epoch": 2.384061449831973, + "grad_norm": 0.6263302899230722, + "learning_rate": 6.272505694224655e-06, + "loss": 1.3001, + "step": 1242 + }, + { + "epoch": 2.385981757081133, + "grad_norm": 0.6136706336821629, + "learning_rate": 6.26602386259135e-06, + "loss": 1.1442, + "step": 1243 + }, + { + "epoch": 2.3879020643302926, + "grad_norm": 0.5570855018978512, + "learning_rate": 6.259539756770078e-06, + "loss": 1.1984, + "step": 1244 + }, + { + "epoch": 2.3898223715794527, + "grad_norm": 0.5596547520906855, + "learning_rate": 6.253053388408389e-06, + "loss": 1.1809, + "step": 1245 + }, + { + "epoch": 2.3917426788286127, + "grad_norm": 0.693407976065708, + "learning_rate": 6.246564769157895e-06, + "loss": 1.3885, + "step": 1246 + }, + { + "epoch": 2.3936629860777723, + "grad_norm": 0.7502411766292096, + "learning_rate": 6.2400739106742545e-06, + "loss": 1.2608, + "step": 1247 + }, + { + "epoch": 2.3955832933269323, + "grad_norm": 0.659943311180132, + "learning_rate": 6.233580824617147e-06, + "loss": 1.1978, + "step": 1248 + }, + { + "epoch": 2.3975036005760924, + "grad_norm": 0.5468106201549785, + "learning_rate": 6.227085522650253e-06, + "loss": 1.2264, + "step": 1249 + }, + { + "epoch": 2.399423907825252, + "grad_norm": 0.5859970642991462, + "learning_rate": 6.220588016441234e-06, + "loss": 1.3923, + "step": 1250 + }, + { + "epoch": 2.401344215074412, + "grad_norm": 0.6745672723394359, + "learning_rate": 6.214088317661709e-06, + "loss": 1.1152, + "step": 1251 + }, + { + "epoch": 2.403264522323572, + "grad_norm": 0.6158852563754607, + "learning_rate": 6.207586437987241e-06, + "loss": 1.3166, + "step": 1252 + }, + { + "epoch": 2.4051848295727316, + "grad_norm": 0.5730095887391516, + "learning_rate": 6.201082389097302e-06, + "loss": 1.1936, + "step": 1253 + }, + { + "epoch": 2.4071051368218916, + "grad_norm": 1.4640138016466724, + "learning_rate": 6.19457618267527e-06, + "loss": 1.2584, + "step": 1254 + }, + { + "epoch": 2.4090254440710512, + "grad_norm": 0.6349000539598749, + "learning_rate": 6.188067830408393e-06, + "loss": 1.224, + "step": 1255 + }, + { + "epoch": 2.4109457513202113, + "grad_norm": 0.6311572823390743, + "learning_rate": 6.181557343987775e-06, + "loss": 1.2915, + "step": 1256 + }, + { + "epoch": 2.412866058569371, + "grad_norm": 0.6695682124451278, + "learning_rate": 6.175044735108349e-06, + "loss": 1.2961, + "step": 1257 + }, + { + "epoch": 2.414786365818531, + "grad_norm": 0.6661777691771004, + "learning_rate": 6.168530015468872e-06, + "loss": 1.1564, + "step": 1258 + }, + { + "epoch": 2.416706673067691, + "grad_norm": 0.5769627139546278, + "learning_rate": 6.162013196771882e-06, + "loss": 1.1191, + "step": 1259 + }, + { + "epoch": 2.4186269803168505, + "grad_norm": 0.6696625061542401, + "learning_rate": 6.155494290723691e-06, + "loss": 1.2594, + "step": 1260 + }, + { + "epoch": 2.4205472875660106, + "grad_norm": 0.5824696247611385, + "learning_rate": 6.148973309034363e-06, + "loss": 1.1747, + "step": 1261 + }, + { + "epoch": 2.4224675948151706, + "grad_norm": 0.6813330398855266, + "learning_rate": 6.142450263417685e-06, 
+ "loss": 1.2173, + "step": 1262 + }, + { + "epoch": 2.42438790206433, + "grad_norm": 0.5692154085477008, + "learning_rate": 6.135925165591159e-06, + "loss": 1.3443, + "step": 1263 + }, + { + "epoch": 2.4263082093134902, + "grad_norm": 0.5197059030883115, + "learning_rate": 6.129398027275966e-06, + "loss": 1.0257, + "step": 1264 + }, + { + "epoch": 2.4282285165626503, + "grad_norm": 0.7088728686250311, + "learning_rate": 6.122868860196956e-06, + "loss": 1.1714, + "step": 1265 + }, + { + "epoch": 2.43014882381181, + "grad_norm": 0.5794244685269138, + "learning_rate": 6.116337676082623e-06, + "loss": 1.2578, + "step": 1266 + }, + { + "epoch": 2.43206913106097, + "grad_norm": 0.5659150508785673, + "learning_rate": 6.109804486665085e-06, + "loss": 1.2957, + "step": 1267 + }, + { + "epoch": 2.4339894383101295, + "grad_norm": 0.6333267338148806, + "learning_rate": 6.103269303680063e-06, + "loss": 1.2752, + "step": 1268 + }, + { + "epoch": 2.4359097455592895, + "grad_norm": 0.5950812272774857, + "learning_rate": 6.0967321388668534e-06, + "loss": 1.2894, + "step": 1269 + }, + { + "epoch": 2.437830052808449, + "grad_norm": 0.6350175691173675, + "learning_rate": 6.090193003968319e-06, + "loss": 1.3265, + "step": 1270 + }, + { + "epoch": 2.439750360057609, + "grad_norm": 0.6837493426325877, + "learning_rate": 6.08365191073086e-06, + "loss": 1.1427, + "step": 1271 + }, + { + "epoch": 2.441670667306769, + "grad_norm": 0.5958537398944314, + "learning_rate": 6.0771088709043915e-06, + "loss": 1.1849, + "step": 1272 + }, + { + "epoch": 2.4435909745559288, + "grad_norm": 0.6021156128212082, + "learning_rate": 6.070563896242329e-06, + "loss": 1.2944, + "step": 1273 + }, + { + "epoch": 2.445511281805089, + "grad_norm": 0.6631372287185189, + "learning_rate": 6.064016998501563e-06, + "loss": 1.1679, + "step": 1274 + }, + { + "epoch": 2.447431589054249, + "grad_norm": 0.5836864312172045, + "learning_rate": 6.057468189442432e-06, + "loss": 1.1259, + "step": 1275 + }, + { + "epoch": 2.4493518963034084, + "grad_norm": 0.7125336969548751, + "learning_rate": 6.050917480828721e-06, + "loss": 1.4025, + "step": 1276 + }, + { + "epoch": 2.4512722035525685, + "grad_norm": 0.5999779991511556, + "learning_rate": 6.044364884427614e-06, + "loss": 1.2283, + "step": 1277 + }, + { + "epoch": 2.4531925108017285, + "grad_norm": 0.6228786103662747, + "learning_rate": 6.037810412009693e-06, + "loss": 1.1781, + "step": 1278 + }, + { + "epoch": 2.455112818050888, + "grad_norm": 0.5995133183671691, + "learning_rate": 6.031254075348908e-06, + "loss": 1.1651, + "step": 1279 + }, + { + "epoch": 2.457033125300048, + "grad_norm": 0.5726228213086509, + "learning_rate": 6.02469588622256e-06, + "loss": 1.2179, + "step": 1280 + }, + { + "epoch": 2.4589534325492077, + "grad_norm": 0.6319072116257335, + "learning_rate": 6.018135856411275e-06, + "loss": 1.0789, + "step": 1281 + }, + { + "epoch": 2.4608737397983678, + "grad_norm": 0.5791309734437495, + "learning_rate": 6.011573997698985e-06, + "loss": 1.233, + "step": 1282 + }, + { + "epoch": 2.4627940470475274, + "grad_norm": 0.6145255184440226, + "learning_rate": 6.00501032187291e-06, + "loss": 1.1686, + "step": 1283 + }, + { + "epoch": 2.4647143542966874, + "grad_norm": 0.5379184144373157, + "learning_rate": 5.998444840723534e-06, + "loss": 1.2602, + "step": 1284 + }, + { + "epoch": 2.4666346615458474, + "grad_norm": 0.6167816126301073, + "learning_rate": 5.991877566044581e-06, + "loss": 1.2921, + "step": 1285 + }, + { + "epoch": 2.468554968795007, + "grad_norm": 0.6535144856705375, + 
"learning_rate": 5.985308509633e-06, + "loss": 1.2201, + "step": 1286 + }, + { + "epoch": 2.470475276044167, + "grad_norm": 0.6860549323986097, + "learning_rate": 5.978737683288938e-06, + "loss": 1.2078, + "step": 1287 + }, + { + "epoch": 2.472395583293327, + "grad_norm": 0.6951627540400147, + "learning_rate": 5.972165098815721e-06, + "loss": 1.2361, + "step": 1288 + }, + { + "epoch": 2.4743158905424867, + "grad_norm": 0.6546505487989865, + "learning_rate": 5.965590768019838e-06, + "loss": 1.2895, + "step": 1289 + }, + { + "epoch": 2.4762361977916467, + "grad_norm": 0.6120831995174033, + "learning_rate": 5.959014702710908e-06, + "loss": 1.4001, + "step": 1290 + }, + { + "epoch": 2.4781565050408068, + "grad_norm": 0.6617422728749986, + "learning_rate": 5.952436914701673e-06, + "loss": 1.2279, + "step": 1291 + }, + { + "epoch": 2.4800768122899663, + "grad_norm": 0.6466808021093433, + "learning_rate": 5.945857415807962e-06, + "loss": 1.3116, + "step": 1292 + }, + { + "epoch": 2.4819971195391264, + "grad_norm": 0.5593849707459436, + "learning_rate": 5.939276217848684e-06, + "loss": 1.3044, + "step": 1293 + }, + { + "epoch": 2.483917426788286, + "grad_norm": 0.694337911560069, + "learning_rate": 5.932693332645796e-06, + "loss": 1.4405, + "step": 1294 + }, + { + "epoch": 2.485837734037446, + "grad_norm": 0.5796450029223225, + "learning_rate": 5.926108772024286e-06, + "loss": 1.3507, + "step": 1295 + }, + { + "epoch": 2.487758041286606, + "grad_norm": 0.6046024795406408, + "learning_rate": 5.919522547812155e-06, + "loss": 1.286, + "step": 1296 + }, + { + "epoch": 2.4896783485357656, + "grad_norm": 0.6493749876717334, + "learning_rate": 5.912934671840389e-06, + "loss": 1.2146, + "step": 1297 + }, + { + "epoch": 2.4915986557849257, + "grad_norm": 0.6796731338841863, + "learning_rate": 5.906345155942943e-06, + "loss": 1.4317, + "step": 1298 + }, + { + "epoch": 2.4935189630340853, + "grad_norm": 0.6860980566749547, + "learning_rate": 5.899754011956715e-06, + "loss": 1.2145, + "step": 1299 + }, + { + "epoch": 2.4954392702832453, + "grad_norm": 0.5565130162971886, + "learning_rate": 5.8931612517215305e-06, + "loss": 1.2637, + "step": 1300 + }, + { + "epoch": 2.4973595775324053, + "grad_norm": 0.6378841349295684, + "learning_rate": 5.886566887080117e-06, + "loss": 1.0994, + "step": 1301 + }, + { + "epoch": 2.499279884781565, + "grad_norm": 0.7116787998039626, + "learning_rate": 5.879970929878086e-06, + "loss": 1.3812, + "step": 1302 + }, + { + "epoch": 2.501200192030725, + "grad_norm": 0.6423843577571763, + "learning_rate": 5.873373391963906e-06, + "loss": 1.1543, + "step": 1303 + }, + { + "epoch": 2.503120499279885, + "grad_norm": 0.5726699800446029, + "learning_rate": 5.866774285188887e-06, + "loss": 1.3002, + "step": 1304 + }, + { + "epoch": 2.5050408065290446, + "grad_norm": 0.6330116873694995, + "learning_rate": 5.860173621407157e-06, + "loss": 1.239, + "step": 1305 + }, + { + "epoch": 2.5069611137782046, + "grad_norm": 0.6372043038208098, + "learning_rate": 5.853571412475644e-06, + "loss": 1.2213, + "step": 1306 + }, + { + "epoch": 2.5088814210273642, + "grad_norm": 0.6247955734407857, + "learning_rate": 5.8469676702540454e-06, + "loss": 1.2773, + "step": 1307 + }, + { + "epoch": 2.5108017282765243, + "grad_norm": 0.5817678497097796, + "learning_rate": 5.840362406604818e-06, + "loss": 1.2735, + "step": 1308 + }, + { + "epoch": 2.512722035525684, + "grad_norm": 0.5520561018680428, + "learning_rate": 5.83375563339315e-06, + "loss": 1.2459, + "step": 1309 + }, + { + "epoch": 2.514642342774844, + 
"grad_norm": 0.7410450237967919, + "learning_rate": 5.82714736248694e-06, + "loss": 1.1031, + "step": 1310 + }, + { + "epoch": 2.516562650024004, + "grad_norm": 0.5738368012193878, + "learning_rate": 5.820537605756778e-06, + "loss": 1.3073, + "step": 1311 + }, + { + "epoch": 2.5184829572731635, + "grad_norm": 0.5573105806604853, + "learning_rate": 5.813926375075924e-06, + "loss": 1.2266, + "step": 1312 + }, + { + "epoch": 2.5204032645223235, + "grad_norm": 0.6181886080856598, + "learning_rate": 5.807313682320285e-06, + "loss": 1.1996, + "step": 1313 + }, + { + "epoch": 2.5223235717714836, + "grad_norm": 0.6730335062822276, + "learning_rate": 5.800699539368391e-06, + "loss": 1.2198, + "step": 1314 + }, + { + "epoch": 2.524243879020643, + "grad_norm": 0.5605883111414951, + "learning_rate": 5.794083958101383e-06, + "loss": 1.2109, + "step": 1315 + }, + { + "epoch": 2.526164186269803, + "grad_norm": 0.6421642462562456, + "learning_rate": 5.7874669504029825e-06, + "loss": 1.0927, + "step": 1316 + }, + { + "epoch": 2.5280844935189632, + "grad_norm": 0.6376536627209224, + "learning_rate": 5.780848528159471e-06, + "loss": 1.1857, + "step": 1317 + }, + { + "epoch": 2.530004800768123, + "grad_norm": 0.6341601739426433, + "learning_rate": 5.774228703259678e-06, + "loss": 1.0717, + "step": 1318 + }, + { + "epoch": 2.531925108017283, + "grad_norm": 0.5804812110235914, + "learning_rate": 5.767607487594944e-06, + "loss": 1.2235, + "step": 1319 + }, + { + "epoch": 2.5338454152664425, + "grad_norm": 0.6154932839231937, + "learning_rate": 5.760984893059115e-06, + "loss": 1.3717, + "step": 1320 + }, + { + "epoch": 2.5357657225156025, + "grad_norm": 0.7574489368316524, + "learning_rate": 5.754360931548509e-06, + "loss": 1.2414, + "step": 1321 + }, + { + "epoch": 2.537686029764762, + "grad_norm": 0.5677124356572427, + "learning_rate": 5.747735614961902e-06, + "loss": 1.2608, + "step": 1322 + }, + { + "epoch": 2.539606337013922, + "grad_norm": 0.5704134007435095, + "learning_rate": 5.741108955200503e-06, + "loss": 1.3432, + "step": 1323 + }, + { + "epoch": 2.541526644263082, + "grad_norm": 0.5951256027367144, + "learning_rate": 5.734480964167935e-06, + "loss": 0.9872, + "step": 1324 + }, + { + "epoch": 2.5434469515122418, + "grad_norm": 0.6719946916626364, + "learning_rate": 5.727851653770211e-06, + "loss": 1.25, + "step": 1325 + }, + { + "epoch": 2.545367258761402, + "grad_norm": 0.6286872277200853, + "learning_rate": 5.721221035915717e-06, + "loss": 1.1818, + "step": 1326 + }, + { + "epoch": 2.547287566010562, + "grad_norm": 0.5265728959047694, + "learning_rate": 5.714589122515182e-06, + "loss": 1.3031, + "step": 1327 + }, + { + "epoch": 2.5492078732597214, + "grad_norm": 0.6295646479508796, + "learning_rate": 5.7079559254816665e-06, + "loss": 1.2045, + "step": 1328 + }, + { + "epoch": 2.5511281805088815, + "grad_norm": 0.6270913464833281, + "learning_rate": 5.701321456730536e-06, + "loss": 1.2163, + "step": 1329 + }, + { + "epoch": 2.5530484877580415, + "grad_norm": 0.6115400056563143, + "learning_rate": 5.694685728179442e-06, + "loss": 1.1537, + "step": 1330 + }, + { + "epoch": 2.554968795007201, + "grad_norm": 0.6602324772946387, + "learning_rate": 5.688048751748296e-06, + "loss": 1.1886, + "step": 1331 + }, + { + "epoch": 2.556889102256361, + "grad_norm": 0.6285231165205739, + "learning_rate": 5.681410539359251e-06, + "loss": 1.1562, + "step": 1332 + }, + { + "epoch": 2.558809409505521, + "grad_norm": 0.5580091730080278, + "learning_rate": 5.6747711029366845e-06, + "loss": 1.1137, + "step": 1333 + }, + { 
+ "epoch": 2.5607297167546808, + "grad_norm": 0.6155227527633782, + "learning_rate": 5.668130454407168e-06, + "loss": 1.2266, + "step": 1334 + }, + { + "epoch": 2.5626500240038403, + "grad_norm": 0.7208056151770321, + "learning_rate": 5.661488605699451e-06, + "loss": 1.2748, + "step": 1335 + }, + { + "epoch": 2.5645703312530004, + "grad_norm": 0.6358039541543873, + "learning_rate": 5.654845568744443e-06, + "loss": 1.2893, + "step": 1336 + }, + { + "epoch": 2.5664906385021604, + "grad_norm": 0.6022914407629255, + "learning_rate": 5.648201355475182e-06, + "loss": 1.1789, + "step": 1337 + }, + { + "epoch": 2.56841094575132, + "grad_norm": 0.6261507684350669, + "learning_rate": 5.641555977826824e-06, + "loss": 1.2932, + "step": 1338 + }, + { + "epoch": 2.57033125300048, + "grad_norm": 0.6556797218150923, + "learning_rate": 5.634909447736614e-06, + "loss": 1.4065, + "step": 1339 + }, + { + "epoch": 2.57225156024964, + "grad_norm": 0.8088493377456971, + "learning_rate": 5.628261777143867e-06, + "loss": 1.1762, + "step": 1340 + }, + { + "epoch": 2.5741718674987997, + "grad_norm": 0.6106237399013169, + "learning_rate": 5.62161297798995e-06, + "loss": 1.2155, + "step": 1341 + }, + { + "epoch": 2.5760921747479597, + "grad_norm": 0.5620193635243992, + "learning_rate": 5.614963062218253e-06, + "loss": 1.1267, + "step": 1342 + }, + { + "epoch": 2.5780124819971197, + "grad_norm": 0.5916968354702812, + "learning_rate": 5.608312041774175e-06, + "loss": 1.3556, + "step": 1343 + }, + { + "epoch": 2.5799327892462793, + "grad_norm": 0.6222711849579262, + "learning_rate": 5.601659928605095e-06, + "loss": 1.2217, + "step": 1344 + }, + { + "epoch": 2.5818530964954394, + "grad_norm": 0.7012656976911378, + "learning_rate": 5.5950067346603595e-06, + "loss": 1.2048, + "step": 1345 + }, + { + "epoch": 2.5837734037445994, + "grad_norm": 0.5879328784503015, + "learning_rate": 5.588352471891259e-06, + "loss": 1.3748, + "step": 1346 + }, + { + "epoch": 2.585693710993759, + "grad_norm": 0.6321780544880533, + "learning_rate": 5.581697152250992e-06, + "loss": 1.3289, + "step": 1347 + }, + { + "epoch": 2.5876140182429186, + "grad_norm": 0.6604594554850995, + "learning_rate": 5.575040787694668e-06, + "loss": 1.2861, + "step": 1348 + }, + { + "epoch": 2.5895343254920786, + "grad_norm": 0.7738496596525316, + "learning_rate": 5.568383390179267e-06, + "loss": 1.371, + "step": 1349 + }, + { + "epoch": 2.5914546327412387, + "grad_norm": 0.561705763795756, + "learning_rate": 5.561724971663628e-06, + "loss": 1.4532, + "step": 1350 + }, + { + "epoch": 2.5933749399903983, + "grad_norm": 0.6718027044013396, + "learning_rate": 5.55506554410842e-06, + "loss": 1.276, + "step": 1351 + }, + { + "epoch": 2.5952952472395583, + "grad_norm": 0.6972126937496573, + "learning_rate": 5.548405119476129e-06, + "loss": 1.2157, + "step": 1352 + }, + { + "epoch": 2.5972155544887183, + "grad_norm": 0.5943209839227868, + "learning_rate": 5.541743709731029e-06, + "loss": 1.2271, + "step": 1353 + }, + { + "epoch": 2.599135861737878, + "grad_norm": 0.6646420886497804, + "learning_rate": 5.535081326839165e-06, + "loss": 1.3276, + "step": 1354 + }, + { + "epoch": 2.601056168987038, + "grad_norm": 0.6178199961209532, + "learning_rate": 5.528417982768328e-06, + "loss": 1.2772, + "step": 1355 + }, + { + "epoch": 2.602976476236198, + "grad_norm": 0.6774402354328428, + "learning_rate": 5.521753689488039e-06, + "loss": 1.2322, + "step": 1356 + }, + { + "epoch": 2.6048967834853576, + "grad_norm": 0.5815814662153418, + "learning_rate": 5.515088458969522e-06, + "loss": 
1.1769, + "step": 1357 + }, + { + "epoch": 2.6068170907345176, + "grad_norm": 0.6922973775030319, + "learning_rate": 5.508422303185682e-06, + "loss": 1.2962, + "step": 1358 + }, + { + "epoch": 2.6087373979836777, + "grad_norm": 0.6665626499511711, + "learning_rate": 5.501755234111095e-06, + "loss": 1.3545, + "step": 1359 + }, + { + "epoch": 2.6106577052328372, + "grad_norm": 0.735542600803809, + "learning_rate": 5.495087263721965e-06, + "loss": 1.1893, + "step": 1360 + }, + { + "epoch": 2.6125780124819973, + "grad_norm": 0.5238670939024578, + "learning_rate": 5.488418403996125e-06, + "loss": 1.1627, + "step": 1361 + }, + { + "epoch": 2.614498319731157, + "grad_norm": 0.6210197741128046, + "learning_rate": 5.481748666913001e-06, + "loss": 1.1276, + "step": 1362 + }, + { + "epoch": 2.616418626980317, + "grad_norm": 0.5963335444059878, + "learning_rate": 5.4750780644535975e-06, + "loss": 1.0468, + "step": 1363 + }, + { + "epoch": 2.6183389342294765, + "grad_norm": 0.628355725486478, + "learning_rate": 5.46840660860047e-06, + "loss": 1.309, + "step": 1364 + }, + { + "epoch": 2.6202592414786365, + "grad_norm": 0.5712468661870007, + "learning_rate": 5.461734311337712e-06, + "loss": 1.2055, + "step": 1365 + }, + { + "epoch": 2.6221795487277966, + "grad_norm": 0.6550614152257059, + "learning_rate": 5.455061184650921e-06, + "loss": 1.1254, + "step": 1366 + }, + { + "epoch": 2.624099855976956, + "grad_norm": 0.5857760293047071, + "learning_rate": 5.448387240527195e-06, + "loss": 1.2524, + "step": 1367 + }, + { + "epoch": 2.626020163226116, + "grad_norm": 0.5996253176790339, + "learning_rate": 5.441712490955088e-06, + "loss": 1.1753, + "step": 1368 + }, + { + "epoch": 2.6279404704752762, + "grad_norm": 0.5192831528485529, + "learning_rate": 5.435036947924611e-06, + "loss": 1.2642, + "step": 1369 + }, + { + "epoch": 2.629860777724436, + "grad_norm": 0.6322581887680471, + "learning_rate": 5.4283606234271955e-06, + "loss": 1.1797, + "step": 1370 + }, + { + "epoch": 2.631781084973596, + "grad_norm": 0.6208687760914947, + "learning_rate": 5.421683529455677e-06, + "loss": 1.2889, + "step": 1371 + }, + { + "epoch": 2.633701392222756, + "grad_norm": 0.6024350882429259, + "learning_rate": 5.415005678004277e-06, + "loss": 1.2419, + "step": 1372 + }, + { + "epoch": 2.6356216994719155, + "grad_norm": 0.5985715149546723, + "learning_rate": 5.40832708106857e-06, + "loss": 1.3948, + "step": 1373 + }, + { + "epoch": 2.6375420067210755, + "grad_norm": 0.6196127560581245, + "learning_rate": 5.401647750645477e-06, + "loss": 1.4807, + "step": 1374 + }, + { + "epoch": 2.639462313970235, + "grad_norm": 0.64681914597301, + "learning_rate": 5.394967698733234e-06, + "loss": 1.1374, + "step": 1375 + }, + { + "epoch": 2.641382621219395, + "grad_norm": 0.4890585689575943, + "learning_rate": 5.38828693733137e-06, + "loss": 1.1564, + "step": 1376 + }, + { + "epoch": 2.6433029284685547, + "grad_norm": 0.5630617563950847, + "learning_rate": 5.381605478440696e-06, + "loss": 1.2389, + "step": 1377 + }, + { + "epoch": 2.645223235717715, + "grad_norm": 0.5788570414678232, + "learning_rate": 5.3749233340632676e-06, + "loss": 1.2963, + "step": 1378 + }, + { + "epoch": 2.647143542966875, + "grad_norm": 0.64567790799067, + "learning_rate": 5.368240516202376e-06, + "loss": 1.1306, + "step": 1379 + }, + { + "epoch": 2.6490638502160344, + "grad_norm": 0.6394046194644902, + "learning_rate": 5.3615570368625235e-06, + "loss": 1.3908, + "step": 1380 + }, + { + "epoch": 2.6509841574651944, + "grad_norm": 0.6542343993204641, + "learning_rate": 
5.354872908049398e-06, + "loss": 1.3043, + "step": 1381 + }, + { + "epoch": 2.6529044647143545, + "grad_norm": 0.5928443232243815, + "learning_rate": 5.348188141769852e-06, + "loss": 1.1948, + "step": 1382 + }, + { + "epoch": 2.654824771963514, + "grad_norm": 0.6634119943181676, + "learning_rate": 5.34150275003189e-06, + "loss": 1.2738, + "step": 1383 + }, + { + "epoch": 2.656745079212674, + "grad_norm": 0.6175973847987798, + "learning_rate": 5.334816744844633e-06, + "loss": 1.2039, + "step": 1384 + }, + { + "epoch": 2.658665386461834, + "grad_norm": 0.5328533760172757, + "learning_rate": 5.328130138218309e-06, + "loss": 1.168, + "step": 1385 + }, + { + "epoch": 2.6605856937109937, + "grad_norm": 0.6333132588626341, + "learning_rate": 5.3214429421642224e-06, + "loss": 1.2574, + "step": 1386 + }, + { + "epoch": 2.6625060009601538, + "grad_norm": 0.6141814037172568, + "learning_rate": 5.3147551686947385e-06, + "loss": 1.3352, + "step": 1387 + }, + { + "epoch": 2.6644263082093134, + "grad_norm": 0.6628973771397662, + "learning_rate": 5.308066829823261e-06, + "loss": 1.164, + "step": 1388 + }, + { + "epoch": 2.6663466154584734, + "grad_norm": 0.6856680778100335, + "learning_rate": 5.301377937564205e-06, + "loss": 1.1983, + "step": 1389 + }, + { + "epoch": 2.668266922707633, + "grad_norm": 0.5261090189495043, + "learning_rate": 5.294688503932986e-06, + "loss": 1.4359, + "step": 1390 + }, + { + "epoch": 2.670187229956793, + "grad_norm": 0.6886373426064217, + "learning_rate": 5.287998540945987e-06, + "loss": 1.271, + "step": 1391 + }, + { + "epoch": 2.672107537205953, + "grad_norm": 0.7012339259498463, + "learning_rate": 5.281308060620543e-06, + "loss": 1.2969, + "step": 1392 + }, + { + "epoch": 2.6740278444551127, + "grad_norm": 0.7355650125399332, + "learning_rate": 5.274617074974918e-06, + "loss": 1.1504, + "step": 1393 + }, + { + "epoch": 2.6759481517042727, + "grad_norm": 0.5562169304602139, + "learning_rate": 5.267925596028285e-06, + "loss": 1.0409, + "step": 1394 + }, + { + "epoch": 2.6778684589534327, + "grad_norm": 0.5919913846193492, + "learning_rate": 5.2612336358007035e-06, + "loss": 1.2113, + "step": 1395 + }, + { + "epoch": 2.6797887662025923, + "grad_norm": 0.6692107545177737, + "learning_rate": 5.2545412063130964e-06, + "loss": 1.1718, + "step": 1396 + }, + { + "epoch": 2.6817090734517524, + "grad_norm": 0.6418333213055866, + "learning_rate": 5.247848319587226e-06, + "loss": 1.2882, + "step": 1397 + }, + { + "epoch": 2.6836293807009124, + "grad_norm": 0.6934356927705114, + "learning_rate": 5.241154987645687e-06, + "loss": 1.2716, + "step": 1398 + }, + { + "epoch": 2.685549687950072, + "grad_norm": 0.5551620206008213, + "learning_rate": 5.234461222511858e-06, + "loss": 1.3031, + "step": 1399 + }, + { + "epoch": 2.687469995199232, + "grad_norm": 0.6360668296023374, + "learning_rate": 5.227767036209911e-06, + "loss": 1.1746, + "step": 1400 + }, + { + "epoch": 2.6893903024483916, + "grad_norm": 0.6995023935942758, + "learning_rate": 5.221072440764765e-06, + "loss": 1.1183, + "step": 1401 + }, + { + "epoch": 2.6913106096975516, + "grad_norm": 0.5654432672068804, + "learning_rate": 5.214377448202075e-06, + "loss": 1.2492, + "step": 1402 + }, + { + "epoch": 2.6932309169467112, + "grad_norm": 0.6937559082305748, + "learning_rate": 5.2076820705482155e-06, + "loss": 1.2054, + "step": 1403 + }, + { + "epoch": 2.6951512241958713, + "grad_norm": 0.6365688104050511, + "learning_rate": 5.200986319830245e-06, + "loss": 1.3126, + "step": 1404 + }, + { + "epoch": 2.6970715314450313, + "grad_norm": 
0.6076408603108523, + "learning_rate": 5.194290208075896e-06, + "loss": 1.4652, + "step": 1405 + }, + { + "epoch": 2.698991838694191, + "grad_norm": 0.7778885537673794, + "learning_rate": 5.18759374731355e-06, + "loss": 1.346, + "step": 1406 + }, + { + "epoch": 2.700912145943351, + "grad_norm": 0.5841648932428964, + "learning_rate": 5.180896949572213e-06, + "loss": 1.4298, + "step": 1407 + }, + { + "epoch": 2.702832453192511, + "grad_norm": 0.6337842674058476, + "learning_rate": 5.174199826881498e-06, + "loss": 1.2634, + "step": 1408 + }, + { + "epoch": 2.7047527604416706, + "grad_norm": 0.5528118794971537, + "learning_rate": 5.167502391271603e-06, + "loss": 1.1476, + "step": 1409 + }, + { + "epoch": 2.7066730676908306, + "grad_norm": 0.6428624442509177, + "learning_rate": 5.160804654773286e-06, + "loss": 1.1906, + "step": 1410 + }, + { + "epoch": 2.7085933749399906, + "grad_norm": 0.5508196505900297, + "learning_rate": 5.154106629417845e-06, + "loss": 1.1828, + "step": 1411 + }, + { + "epoch": 2.7105136821891502, + "grad_norm": 0.5399557474898792, + "learning_rate": 5.147408327237099e-06, + "loss": 1.1979, + "step": 1412 + }, + { + "epoch": 2.7124339894383103, + "grad_norm": 0.5785664255689172, + "learning_rate": 5.140709760263364e-06, + "loss": 1.2234, + "step": 1413 + }, + { + "epoch": 2.71435429668747, + "grad_norm": 0.5862267357113349, + "learning_rate": 5.134010940529429e-06, + "loss": 1.3416, + "step": 1414 + }, + { + "epoch": 2.71627460393663, + "grad_norm": 0.5962477885128445, + "learning_rate": 5.127311880068539e-06, + "loss": 1.1983, + "step": 1415 + }, + { + "epoch": 2.7181949111857895, + "grad_norm": 0.6863189095052216, + "learning_rate": 5.1206125909143745e-06, + "loss": 1.3491, + "step": 1416 + }, + { + "epoch": 2.7201152184349495, + "grad_norm": 0.5885538990134215, + "learning_rate": 5.11391308510102e-06, + "loss": 1.1574, + "step": 1417 + }, + { + "epoch": 2.7220355256841096, + "grad_norm": 0.6104132348781779, + "learning_rate": 5.107213374662954e-06, + "loss": 1.2853, + "step": 1418 + }, + { + "epoch": 2.723955832933269, + "grad_norm": 0.6153215004897434, + "learning_rate": 5.100513471635022e-06, + "loss": 1.0575, + "step": 1419 + }, + { + "epoch": 2.725876140182429, + "grad_norm": 0.6305261136020173, + "learning_rate": 5.0938133880524145e-06, + "loss": 1.1139, + "step": 1420 + }, + { + "epoch": 2.727796447431589, + "grad_norm": 0.6887979401369777, + "learning_rate": 5.087113135950646e-06, + "loss": 1.2312, + "step": 1421 + }, + { + "epoch": 2.729716754680749, + "grad_norm": 0.54068610955665, + "learning_rate": 5.080412727365536e-06, + "loss": 1.0664, + "step": 1422 + }, + { + "epoch": 2.731637061929909, + "grad_norm": 0.6171721436312619, + "learning_rate": 5.073712174333182e-06, + "loss": 1.3482, + "step": 1423 + }, + { + "epoch": 2.733557369179069, + "grad_norm": 0.6148374132384409, + "learning_rate": 5.067011488889944e-06, + "loss": 1.1996, + "step": 1424 + }, + { + "epoch": 2.7354776764282285, + "grad_norm": 0.5915123714502185, + "learning_rate": 5.060310683072417e-06, + "loss": 1.3484, + "step": 1425 + }, + { + "epoch": 2.7373979836773885, + "grad_norm": 0.5320933394573898, + "learning_rate": 5.053609768917414e-06, + "loss": 1.3501, + "step": 1426 + }, + { + "epoch": 2.739318290926548, + "grad_norm": 0.6105600950051874, + "learning_rate": 5.0469087584619435e-06, + "loss": 1.0733, + "step": 1427 + }, + { + "epoch": 2.741238598175708, + "grad_norm": 0.5991959421709251, + "learning_rate": 5.040207663743182e-06, + "loss": 1.2451, + "step": 1428 + }, + { + "epoch": 
2.7431589054248677, + "grad_norm": 0.6231899390190498, + "learning_rate": 5.033506496798466e-06, + "loss": 1.2847, + "step": 1429 + }, + { + "epoch": 2.7450792126740278, + "grad_norm": 0.5893989916209164, + "learning_rate": 5.026805269665254e-06, + "loss": 1.2254, + "step": 1430 + }, + { + "epoch": 2.746999519923188, + "grad_norm": 0.6084702626959012, + "learning_rate": 5.020103994381114e-06, + "loss": 1.2172, + "step": 1431 + }, + { + "epoch": 2.7489198271723474, + "grad_norm": 0.6097212003787716, + "learning_rate": 5.013402682983705e-06, + "loss": 1.1736, + "step": 1432 + }, + { + "epoch": 2.7508401344215074, + "grad_norm": 0.6318957281734904, + "learning_rate": 5.006701347510745e-06, + "loss": 1.3208, + "step": 1433 + }, + { + "epoch": 2.7527604416706675, + "grad_norm": 0.5993572834373231, + "learning_rate": 5e-06, + "loss": 1.3066, + "step": 1434 + }, + { + "epoch": 2.754680748919827, + "grad_norm": 0.650769528122097, + "learning_rate": 4.9932986524892554e-06, + "loss": 1.3586, + "step": 1435 + }, + { + "epoch": 2.756601056168987, + "grad_norm": 0.5228860007583129, + "learning_rate": 4.986597317016298e-06, + "loss": 1.2146, + "step": 1436 + }, + { + "epoch": 2.758521363418147, + "grad_norm": 0.6789312600215679, + "learning_rate": 4.979896005618887e-06, + "loss": 1.1601, + "step": 1437 + }, + { + "epoch": 2.7604416706673067, + "grad_norm": 0.6510933300140923, + "learning_rate": 4.9731947303347485e-06, + "loss": 1.169, + "step": 1438 + }, + { + "epoch": 2.7623619779164668, + "grad_norm": 0.4995020628757473, + "learning_rate": 4.966493503201537e-06, + "loss": 1.17, + "step": 1439 + }, + { + "epoch": 2.7642822851656264, + "grad_norm": 0.5852462719837537, + "learning_rate": 4.959792336256819e-06, + "loss": 1.307, + "step": 1440 + }, + { + "epoch": 2.7662025924147864, + "grad_norm": 0.5619937862506597, + "learning_rate": 4.953091241538058e-06, + "loss": 1.2523, + "step": 1441 + }, + { + "epoch": 2.768122899663946, + "grad_norm": 0.5729798655879715, + "learning_rate": 4.946390231082586e-06, + "loss": 1.1788, + "step": 1442 + }, + { + "epoch": 2.770043206913106, + "grad_norm": 0.6120211741613024, + "learning_rate": 4.939689316927584e-06, + "loss": 1.1841, + "step": 1443 + }, + { + "epoch": 2.771963514162266, + "grad_norm": 0.6194227496714729, + "learning_rate": 4.932988511110058e-06, + "loss": 1.1851, + "step": 1444 + }, + { + "epoch": 2.7738838214114256, + "grad_norm": 0.6589157606436065, + "learning_rate": 4.926287825666818e-06, + "loss": 1.328, + "step": 1445 + }, + { + "epoch": 2.7758041286605857, + "grad_norm": 0.5867467759483119, + "learning_rate": 4.919587272634466e-06, + "loss": 1.2535, + "step": 1446 + }, + { + "epoch": 2.7777244359097457, + "grad_norm": 0.6274013769868285, + "learning_rate": 4.9128868640493556e-06, + "loss": 1.239, + "step": 1447 + }, + { + "epoch": 2.7796447431589053, + "grad_norm": 0.5473097320742439, + "learning_rate": 4.906186611947587e-06, + "loss": 1.2383, + "step": 1448 + }, + { + "epoch": 2.7815650504080653, + "grad_norm": 0.6396556685335861, + "learning_rate": 4.89948652836498e-06, + "loss": 1.2159, + "step": 1449 + }, + { + "epoch": 2.7834853576572254, + "grad_norm": 0.5649886726795607, + "learning_rate": 4.892786625337047e-06, + "loss": 1.2335, + "step": 1450 + }, + { + "epoch": 2.785405664906385, + "grad_norm": 0.591568752371345, + "learning_rate": 4.886086914898982e-06, + "loss": 1.2937, + "step": 1451 + }, + { + "epoch": 2.787325972155545, + "grad_norm": 0.6259062421801402, + "learning_rate": 4.879387409085628e-06, + "loss": 1.3336, + "step": 1452 + }, 
+ { + "epoch": 2.7892462794047046, + "grad_norm": 0.6111028684284887, + "learning_rate": 4.8726881199314615e-06, + "loss": 1.3996, + "step": 1453 + }, + { + "epoch": 2.7911665866538646, + "grad_norm": 0.6073121787268624, + "learning_rate": 4.865989059470572e-06, + "loss": 1.2477, + "step": 1454 + }, + { + "epoch": 2.7930868939030242, + "grad_norm": 0.6160489072141255, + "learning_rate": 4.859290239736637e-06, + "loss": 1.2308, + "step": 1455 + }, + { + "epoch": 2.7950072011521843, + "grad_norm": 0.616064300242587, + "learning_rate": 4.8525916727629025e-06, + "loss": 1.4222, + "step": 1456 + }, + { + "epoch": 2.7969275084013443, + "grad_norm": 0.599425508718928, + "learning_rate": 4.845893370582156e-06, + "loss": 1.2779, + "step": 1457 + }, + { + "epoch": 2.798847815650504, + "grad_norm": 0.621016471101322, + "learning_rate": 4.839195345226715e-06, + "loss": 1.2773, + "step": 1458 + }, + { + "epoch": 2.800768122899664, + "grad_norm": 0.6134704585567262, + "learning_rate": 4.832497608728398e-06, + "loss": 0.9875, + "step": 1459 + }, + { + "epoch": 2.802688430148824, + "grad_norm": 0.6274670165366076, + "learning_rate": 4.825800173118503e-06, + "loss": 1.3622, + "step": 1460 + }, + { + "epoch": 2.8046087373979836, + "grad_norm": 0.5740419397746597, + "learning_rate": 4.819103050427788e-06, + "loss": 1.2185, + "step": 1461 + }, + { + "epoch": 2.8065290446471436, + "grad_norm": 0.5571624343029756, + "learning_rate": 4.812406252686453e-06, + "loss": 1.2336, + "step": 1462 + }, + { + "epoch": 2.8084493518963036, + "grad_norm": 0.7028307041043893, + "learning_rate": 4.805709791924106e-06, + "loss": 1.2159, + "step": 1463 + }, + { + "epoch": 2.810369659145463, + "grad_norm": 0.5857678198418313, + "learning_rate": 4.799013680169757e-06, + "loss": 1.3844, + "step": 1464 + }, + { + "epoch": 2.8122899663946233, + "grad_norm": 0.5664125125624209, + "learning_rate": 4.792317929451787e-06, + "loss": 1.3233, + "step": 1465 + }, + { + "epoch": 2.814210273643783, + "grad_norm": 0.6736424995932656, + "learning_rate": 4.785622551797926e-06, + "loss": 1.174, + "step": 1466 + }, + { + "epoch": 2.816130580892943, + "grad_norm": 0.6185112123870505, + "learning_rate": 4.778927559235236e-06, + "loss": 1.169, + "step": 1467 + }, + { + "epoch": 2.8180508881421025, + "grad_norm": 0.6282965696796463, + "learning_rate": 4.7722329637900895e-06, + "loss": 1.3449, + "step": 1468 + }, + { + "epoch": 2.8199711953912625, + "grad_norm": 0.595392473019611, + "learning_rate": 4.765538777488143e-06, + "loss": 1.3191, + "step": 1469 + }, + { + "epoch": 2.8218915026404225, + "grad_norm": 0.6339955968714532, + "learning_rate": 4.758845012354314e-06, + "loss": 1.1657, + "step": 1470 + }, + { + "epoch": 2.823811809889582, + "grad_norm": 0.5884945613572157, + "learning_rate": 4.752151680412774e-06, + "loss": 1.1951, + "step": 1471 + }, + { + "epoch": 2.825732117138742, + "grad_norm": 0.570157509946084, + "learning_rate": 4.745458793686906e-06, + "loss": 1.0683, + "step": 1472 + }, + { + "epoch": 2.827652424387902, + "grad_norm": 0.5701706927749041, + "learning_rate": 4.738766364199298e-06, + "loss": 1.2678, + "step": 1473 + }, + { + "epoch": 2.829572731637062, + "grad_norm": 0.6821992872870765, + "learning_rate": 4.732074403971716e-06, + "loss": 1.0771, + "step": 1474 + }, + { + "epoch": 2.831493038886222, + "grad_norm": 0.5742447400668917, + "learning_rate": 4.725382925025085e-06, + "loss": 1.3243, + "step": 1475 + }, + { + "epoch": 2.833413346135382, + "grad_norm": 0.5073154580413796, + "learning_rate": 4.718691939379459e-06, + 
"loss": 1.1815, + "step": 1476 + }, + { + "epoch": 2.8353336533845415, + "grad_norm": 0.6281127816737985, + "learning_rate": 4.712001459054015e-06, + "loss": 1.2478, + "step": 1477 + }, + { + "epoch": 2.8372539606337015, + "grad_norm": 0.6280724554540403, + "learning_rate": 4.705311496067016e-06, + "loss": 1.2767, + "step": 1478 + }, + { + "epoch": 2.839174267882861, + "grad_norm": 0.6498477298816885, + "learning_rate": 4.698622062435797e-06, + "loss": 1.4186, + "step": 1479 + }, + { + "epoch": 2.841094575132021, + "grad_norm": 0.6271695380655552, + "learning_rate": 4.691933170176741e-06, + "loss": 1.2015, + "step": 1480 + }, + { + "epoch": 2.8430148823811807, + "grad_norm": 0.5491548417377377, + "learning_rate": 4.685244831305262e-06, + "loss": 1.2355, + "step": 1481 + }, + { + "epoch": 2.8449351896303408, + "grad_norm": 0.5864034248211509, + "learning_rate": 4.67855705783578e-06, + "loss": 1.341, + "step": 1482 + }, + { + "epoch": 2.846855496879501, + "grad_norm": 0.651269297725374, + "learning_rate": 4.671869861781692e-06, + "loss": 1.2618, + "step": 1483 + }, + { + "epoch": 2.8487758041286604, + "grad_norm": 0.5736137573243583, + "learning_rate": 4.665183255155367e-06, + "loss": 1.1813, + "step": 1484 + }, + { + "epoch": 2.8506961113778204, + "grad_norm": 0.5966060659621522, + "learning_rate": 4.658497249968111e-06, + "loss": 1.2838, + "step": 1485 + }, + { + "epoch": 2.8526164186269805, + "grad_norm": 0.6436473495005255, + "learning_rate": 4.651811858230149e-06, + "loss": 1.1568, + "step": 1486 + }, + { + "epoch": 2.85453672587614, + "grad_norm": 0.6006426997501972, + "learning_rate": 4.645127091950603e-06, + "loss": 1.0614, + "step": 1487 + }, + { + "epoch": 2.8564570331253, + "grad_norm": 0.563971575777618, + "learning_rate": 4.638442963137478e-06, + "loss": 1.1671, + "step": 1488 + }, + { + "epoch": 2.85837734037446, + "grad_norm": 0.6062151438031776, + "learning_rate": 4.631759483797625e-06, + "loss": 1.2848, + "step": 1489 + }, + { + "epoch": 2.8602976476236197, + "grad_norm": 0.5577440382869018, + "learning_rate": 4.625076665936733e-06, + "loss": 0.9997, + "step": 1490 + }, + { + "epoch": 2.8622179548727797, + "grad_norm": 0.5757168561642692, + "learning_rate": 4.6183945215593065e-06, + "loss": 1.2167, + "step": 1491 + }, + { + "epoch": 2.86413826212194, + "grad_norm": 0.5328563205215284, + "learning_rate": 4.6117130626686304e-06, + "loss": 1.1324, + "step": 1492 + }, + { + "epoch": 2.8660585693710994, + "grad_norm": 0.5623264525950473, + "learning_rate": 4.605032301266768e-06, + "loss": 1.1798, + "step": 1493 + }, + { + "epoch": 2.867978876620259, + "grad_norm": 0.6044089763292664, + "learning_rate": 4.5983522493545246e-06, + "loss": 1.2068, + "step": 1494 + }, + { + "epoch": 2.869899183869419, + "grad_norm": 0.5847969602032593, + "learning_rate": 4.591672918931433e-06, + "loss": 1.3445, + "step": 1495 + }, + { + "epoch": 2.871819491118579, + "grad_norm": 0.7719499870853559, + "learning_rate": 4.584994321995725e-06, + "loss": 1.2524, + "step": 1496 + }, + { + "epoch": 2.8737397983677386, + "grad_norm": 0.6664338376156864, + "learning_rate": 4.578316470544323e-06, + "loss": 1.4155, + "step": 1497 + }, + { + "epoch": 2.8756601056168987, + "grad_norm": 0.7115327797700147, + "learning_rate": 4.571639376572806e-06, + "loss": 1.2976, + "step": 1498 + }, + { + "epoch": 2.8775804128660587, + "grad_norm": 0.613845802385372, + "learning_rate": 4.564963052075391e-06, + "loss": 1.1501, + "step": 1499 + }, + { + "epoch": 2.8795007201152183, + "grad_norm": 0.581581397000781, + 
"learning_rate": 4.558287509044913e-06, + "loss": 1.1418, + "step": 1500 + }, + { + "epoch": 2.8814210273643783, + "grad_norm": 0.6661672047569515, + "learning_rate": 4.551612759472808e-06, + "loss": 1.2158, + "step": 1501 + }, + { + "epoch": 2.8833413346135384, + "grad_norm": 0.5811427703283559, + "learning_rate": 4.544938815349079e-06, + "loss": 1.0787, + "step": 1502 + }, + { + "epoch": 2.885261641862698, + "grad_norm": 0.6076712464127186, + "learning_rate": 4.53826568866229e-06, + "loss": 1.3383, + "step": 1503 + }, + { + "epoch": 2.887181949111858, + "grad_norm": 0.6240599236817694, + "learning_rate": 4.531593391399532e-06, + "loss": 1.1371, + "step": 1504 + }, + { + "epoch": 2.889102256361018, + "grad_norm": 0.5834223060081416, + "learning_rate": 4.524921935546403e-06, + "loss": 1.1745, + "step": 1505 + }, + { + "epoch": 2.8910225636101776, + "grad_norm": 0.5520494475127871, + "learning_rate": 4.5182513330869996e-06, + "loss": 1.2237, + "step": 1506 + }, + { + "epoch": 2.8929428708593377, + "grad_norm": 0.6261061301979235, + "learning_rate": 4.511581596003876e-06, + "loss": 1.2101, + "step": 1507 + }, + { + "epoch": 2.8948631781084972, + "grad_norm": 0.5647265473440193, + "learning_rate": 4.504912736278038e-06, + "loss": 1.3304, + "step": 1508 + }, + { + "epoch": 2.8967834853576573, + "grad_norm": 0.5480899790116897, + "learning_rate": 4.498244765888907e-06, + "loss": 1.1861, + "step": 1509 + }, + { + "epoch": 2.898703792606817, + "grad_norm": 0.6563010965058005, + "learning_rate": 4.491577696814318e-06, + "loss": 1.1922, + "step": 1510 + }, + { + "epoch": 2.900624099855977, + "grad_norm": 0.6608401850521457, + "learning_rate": 4.484911541030481e-06, + "loss": 1.2171, + "step": 1511 + }, + { + "epoch": 2.902544407105137, + "grad_norm": 0.579456906409508, + "learning_rate": 4.478246310511963e-06, + "loss": 1.208, + "step": 1512 + }, + { + "epoch": 2.9044647143542965, + "grad_norm": 0.5845531368383728, + "learning_rate": 4.471582017231673e-06, + "loss": 1.2664, + "step": 1513 + }, + { + "epoch": 2.9063850216034566, + "grad_norm": 0.6093436425644907, + "learning_rate": 4.464918673160837e-06, + "loss": 1.4305, + "step": 1514 + }, + { + "epoch": 2.9083053288526166, + "grad_norm": 0.610189940208305, + "learning_rate": 4.4582562902689726e-06, + "loss": 1.1343, + "step": 1515 + }, + { + "epoch": 2.910225636101776, + "grad_norm": 0.5634128751018531, + "learning_rate": 4.451594880523872e-06, + "loss": 1.2433, + "step": 1516 + }, + { + "epoch": 2.9121459433509362, + "grad_norm": 0.5726466147422486, + "learning_rate": 4.44493445589158e-06, + "loss": 1.3141, + "step": 1517 + }, + { + "epoch": 2.9140662506000963, + "grad_norm": 0.623536692319963, + "learning_rate": 4.438275028336374e-06, + "loss": 1.2369, + "step": 1518 + }, + { + "epoch": 2.915986557849256, + "grad_norm": 0.6090348325148056, + "learning_rate": 4.431616609820734e-06, + "loss": 1.3005, + "step": 1519 + }, + { + "epoch": 2.917906865098416, + "grad_norm": 0.5454657429606619, + "learning_rate": 4.424959212305334e-06, + "loss": 1.1802, + "step": 1520 + }, + { + "epoch": 2.9198271723475755, + "grad_norm": 0.5865537935296061, + "learning_rate": 4.4183028477490104e-06, + "loss": 1.2783, + "step": 1521 + }, + { + "epoch": 2.9217474795967355, + "grad_norm": 0.6401005519806768, + "learning_rate": 4.411647528108744e-06, + "loss": 1.2738, + "step": 1522 + }, + { + "epoch": 2.923667786845895, + "grad_norm": 0.654060271373098, + "learning_rate": 4.40499326533964e-06, + "loss": 1.3858, + "step": 1523 + }, + { + "epoch": 2.925588094095055, + 
"grad_norm": 0.5852133615331452, + "learning_rate": 4.398340071394906e-06, + "loss": 1.1804, + "step": 1524 + }, + { + "epoch": 2.927508401344215, + "grad_norm": 0.593873231371995, + "learning_rate": 4.391687958225828e-06, + "loss": 1.2319, + "step": 1525 + }, + { + "epoch": 2.929428708593375, + "grad_norm": 0.5987318760689788, + "learning_rate": 4.385036937781747e-06, + "loss": 1.358, + "step": 1526 + }, + { + "epoch": 2.931349015842535, + "grad_norm": 0.5920609101980319, + "learning_rate": 4.378387022010051e-06, + "loss": 1.2592, + "step": 1527 + }, + { + "epoch": 2.933269323091695, + "grad_norm": 0.5531012218038981, + "learning_rate": 4.371738222856134e-06, + "loss": 1.156, + "step": 1528 + }, + { + "epoch": 2.9351896303408544, + "grad_norm": 0.7449451350986409, + "learning_rate": 4.365090552263388e-06, + "loss": 1.342, + "step": 1529 + }, + { + "epoch": 2.9371099375900145, + "grad_norm": 0.6024741270458807, + "learning_rate": 4.358444022173177e-06, + "loss": 1.173, + "step": 1530 + }, + { + "epoch": 2.9390302448391745, + "grad_norm": 0.5819701472541976, + "learning_rate": 4.351798644524819e-06, + "loss": 1.1478, + "step": 1531 + }, + { + "epoch": 2.940950552088334, + "grad_norm": 0.6380830987617244, + "learning_rate": 4.345154431255559e-06, + "loss": 1.1626, + "step": 1532 + }, + { + "epoch": 2.942870859337494, + "grad_norm": 0.6189462026330061, + "learning_rate": 4.338511394300549e-06, + "loss": 1.1721, + "step": 1533 + }, + { + "epoch": 2.9447911665866537, + "grad_norm": 0.5954787088500139, + "learning_rate": 4.331869545592834e-06, + "loss": 1.2768, + "step": 1534 + }, + { + "epoch": 2.9467114738358138, + "grad_norm": 0.6099943998376384, + "learning_rate": 4.325228897063316e-06, + "loss": 1.2215, + "step": 1535 + }, + { + "epoch": 2.9486317810849734, + "grad_norm": 0.596414187956695, + "learning_rate": 4.318589460640748e-06, + "loss": 1.2065, + "step": 1536 + }, + { + "epoch": 2.9505520883341334, + "grad_norm": 0.5572935791426047, + "learning_rate": 4.311951248251706e-06, + "loss": 1.3073, + "step": 1537 + }, + { + "epoch": 2.9524723955832934, + "grad_norm": 0.6052886400414235, + "learning_rate": 4.30531427182056e-06, + "loss": 1.2854, + "step": 1538 + }, + { + "epoch": 2.954392702832453, + "grad_norm": 0.7271655672959135, + "learning_rate": 4.298678543269463e-06, + "loss": 1.2081, + "step": 1539 + }, + { + "epoch": 2.956313010081613, + "grad_norm": 0.5378551913802601, + "learning_rate": 4.292044074518335e-06, + "loss": 1.4137, + "step": 1540 + }, + { + "epoch": 2.958233317330773, + "grad_norm": 0.5352069611163749, + "learning_rate": 4.2854108774848205e-06, + "loss": 1.1978, + "step": 1541 + }, + { + "epoch": 2.9601536245799327, + "grad_norm": 0.6477438905633637, + "learning_rate": 4.278778964084284e-06, + "loss": 1.4337, + "step": 1542 + }, + { + "epoch": 2.9620739318290927, + "grad_norm": 0.6350641894866229, + "learning_rate": 4.272148346229789e-06, + "loss": 1.343, + "step": 1543 + }, + { + "epoch": 2.9639942390782528, + "grad_norm": 0.648098579992315, + "learning_rate": 4.2655190358320665e-06, + "loss": 1.1793, + "step": 1544 + }, + { + "epoch": 2.9659145463274124, + "grad_norm": 0.5675147431907982, + "learning_rate": 4.2588910447994984e-06, + "loss": 1.1826, + "step": 1545 + }, + { + "epoch": 2.9678348535765724, + "grad_norm": 0.5562251145463197, + "learning_rate": 4.2522643850380985e-06, + "loss": 1.3042, + "step": 1546 + }, + { + "epoch": 2.969755160825732, + "grad_norm": 0.5951722972940838, + "learning_rate": 4.2456390684514934e-06, + "loss": 1.1933, + "step": 1547 + }, + { + 
"epoch": 2.971675468074892, + "grad_norm": 0.585992209920053, + "learning_rate": 4.239015106940887e-06, + "loss": 1.2478, + "step": 1548 + }, + { + "epoch": 2.9735957753240516, + "grad_norm": 0.6390443418921725, + "learning_rate": 4.2323925124050565e-06, + "loss": 1.1196, + "step": 1549 + }, + { + "epoch": 2.9755160825732117, + "grad_norm": 0.5159014061938866, + "learning_rate": 4.225771296740325e-06, + "loss": 1.0836, + "step": 1550 + }, + { + "epoch": 2.9774363898223717, + "grad_norm": 0.5977392002236628, + "learning_rate": 4.2191514718405294e-06, + "loss": 1.2605, + "step": 1551 + }, + { + "epoch": 2.9793566970715313, + "grad_norm": 0.6357097877139801, + "learning_rate": 4.21253304959702e-06, + "loss": 1.3157, + "step": 1552 + }, + { + "epoch": 2.9812770043206913, + "grad_norm": 0.6322697400391105, + "learning_rate": 4.20591604189862e-06, + "loss": 1.2, + "step": 1553 + }, + { + "epoch": 2.9831973115698514, + "grad_norm": 0.6867060625705013, + "learning_rate": 4.1993004606316114e-06, + "loss": 1.2566, + "step": 1554 + }, + { + "epoch": 2.985117618819011, + "grad_norm": 0.5429761264375036, + "learning_rate": 4.192686317679718e-06, + "loss": 1.0437, + "step": 1555 + }, + { + "epoch": 2.987037926068171, + "grad_norm": 0.5915388732923772, + "learning_rate": 4.186073624924077e-06, + "loss": 1.1368, + "step": 1556 + }, + { + "epoch": 2.988958233317331, + "grad_norm": 0.589920236068399, + "learning_rate": 4.179462394243223e-06, + "loss": 0.9987, + "step": 1557 + }, + { + "epoch": 2.9908785405664906, + "grad_norm": 0.6024351644712322, + "learning_rate": 4.172852637513062e-06, + "loss": 1.2706, + "step": 1558 + }, + { + "epoch": 2.9927988478156506, + "grad_norm": 0.6159633084221685, + "learning_rate": 4.16624436660685e-06, + "loss": 1.2461, + "step": 1559 + }, + { + "epoch": 2.9947191550648102, + "grad_norm": 0.6171382523271884, + "learning_rate": 4.1596375933951835e-06, + "loss": 1.4, + "step": 1560 + }, + { + "epoch": 2.9966394623139703, + "grad_norm": 0.6212508915364183, + "learning_rate": 4.153032329745955e-06, + "loss": 1.321, + "step": 1561 + }, + { + "epoch": 2.99855976956313, + "grad_norm": 0.6648103018447411, + "learning_rate": 4.146428587524358e-06, + "loss": 1.1697, + "step": 1562 + }, + { + "epoch": 3.0, + "grad_norm": 0.6571383703715333, + "learning_rate": 4.139826378592845e-06, + "loss": 1.2437, + "step": 1563 + }, + { + "epoch": 3.00192030724916, + "grad_norm": 0.6920035358105474, + "learning_rate": 4.133225714811115e-06, + "loss": 1.2066, + "step": 1564 + }, + { + "epoch": 3.0038406144983196, + "grad_norm": 0.6399341621336804, + "learning_rate": 4.126626608036096e-06, + "loss": 1.2082, + "step": 1565 + }, + { + "epoch": 3.0057609217474797, + "grad_norm": 0.5458267700066459, + "learning_rate": 4.120029070121917e-06, + "loss": 1.1681, + "step": 1566 + }, + { + "epoch": 3.0076812289966393, + "grad_norm": 0.5694502919449518, + "learning_rate": 4.113433112919885e-06, + "loss": 1.2487, + "step": 1567 + }, + { + "epoch": 3.0096015362457993, + "grad_norm": 0.7832581777786879, + "learning_rate": 4.10683874827847e-06, + "loss": 1.3792, + "step": 1568 + }, + { + "epoch": 3.0115218434949593, + "grad_norm": 0.5592078073531346, + "learning_rate": 4.100245988043286e-06, + "loss": 1.1741, + "step": 1569 + }, + { + "epoch": 3.013442150744119, + "grad_norm": 0.6238694169777276, + "learning_rate": 4.093654844057059e-06, + "loss": 1.1667, + "step": 1570 + }, + { + "epoch": 3.015362457993279, + "grad_norm": 0.587291885264133, + "learning_rate": 4.087065328159612e-06, + "loss": 1.1234, + "step": 1571 
+ }, + { + "epoch": 3.017282765242439, + "grad_norm": 0.6162677364976138, + "learning_rate": 4.080477452187845e-06, + "loss": 1.2024, + "step": 1572 + }, + { + "epoch": 3.0192030724915986, + "grad_norm": 0.5848912417557425, + "learning_rate": 4.073891227975715e-06, + "loss": 1.3834, + "step": 1573 + }, + { + "epoch": 3.0211233797407586, + "grad_norm": 0.65878421299322, + "learning_rate": 4.067306667354206e-06, + "loss": 1.2408, + "step": 1574 + }, + { + "epoch": 3.023043686989918, + "grad_norm": 0.6448287717472746, + "learning_rate": 4.060723782151318e-06, + "loss": 1.058, + "step": 1575 + }, + { + "epoch": 3.0249639942390782, + "grad_norm": 0.6953036055288947, + "learning_rate": 4.05414258419204e-06, + "loss": 1.2057, + "step": 1576 + }, + { + "epoch": 3.0268843014882383, + "grad_norm": 0.6220136256274538, + "learning_rate": 4.047563085298329e-06, + "loss": 1.3089, + "step": 1577 + }, + { + "epoch": 3.028804608737398, + "grad_norm": 0.523585103987251, + "learning_rate": 4.040985297289093e-06, + "loss": 1.1489, + "step": 1578 + }, + { + "epoch": 3.030724915986558, + "grad_norm": 0.616559579446188, + "learning_rate": 4.0344092319801645e-06, + "loss": 1.3165, + "step": 1579 + }, + { + "epoch": 3.0326452232357175, + "grad_norm": 0.6460233062719725, + "learning_rate": 4.0278349011842806e-06, + "loss": 1.1542, + "step": 1580 + }, + { + "epoch": 3.0345655304848775, + "grad_norm": 0.5765918776286582, + "learning_rate": 4.021262316711063e-06, + "loss": 1.4602, + "step": 1581 + }, + { + "epoch": 3.0364858377340376, + "grad_norm": 0.5644789189860252, + "learning_rate": 4.014691490367e-06, + "loss": 1.144, + "step": 1582 + }, + { + "epoch": 3.038406144983197, + "grad_norm": 0.6677872569767793, + "learning_rate": 4.0081224339554195e-06, + "loss": 1.3855, + "step": 1583 + }, + { + "epoch": 3.040326452232357, + "grad_norm": 0.7113153244823154, + "learning_rate": 4.001555159276467e-06, + "loss": 1.4033, + "step": 1584 + }, + { + "epoch": 3.0422467594815172, + "grad_norm": 0.5835833846209828, + "learning_rate": 3.9949896781270896e-06, + "loss": 1.0761, + "step": 1585 + }, + { + "epoch": 3.044167066730677, + "grad_norm": 0.5637665556775909, + "learning_rate": 3.988426002301016e-06, + "loss": 1.1845, + "step": 1586 + }, + { + "epoch": 3.046087373979837, + "grad_norm": 0.6252805651167873, + "learning_rate": 3.981864143588728e-06, + "loss": 1.3112, + "step": 1587 + }, + { + "epoch": 3.0480076812289965, + "grad_norm": 0.7319781620532901, + "learning_rate": 3.9753041137774414e-06, + "loss": 1.2899, + "step": 1588 + }, + { + "epoch": 3.0499279884781565, + "grad_norm": 0.6298873734021522, + "learning_rate": 3.968745924651095e-06, + "loss": 1.2116, + "step": 1589 + }, + { + "epoch": 3.0518482957273165, + "grad_norm": 0.4949713261879294, + "learning_rate": 3.96218958799031e-06, + "loss": 1.242, + "step": 1590 + }, + { + "epoch": 3.053768602976476, + "grad_norm": 0.6200017520567424, + "learning_rate": 3.955635115572388e-06, + "loss": 1.3045, + "step": 1591 + }, + { + "epoch": 3.055688910225636, + "grad_norm": 0.5810261764330916, + "learning_rate": 3.949082519171282e-06, + "loss": 1.1125, + "step": 1592 + }, + { + "epoch": 3.0576092174747957, + "grad_norm": 0.591784273111627, + "learning_rate": 3.9425318105575695e-06, + "loss": 1.2647, + "step": 1593 + }, + { + "epoch": 3.059529524723956, + "grad_norm": 0.5250244430496426, + "learning_rate": 3.935983001498439e-06, + "loss": 1.093, + "step": 1594 + }, + { + "epoch": 3.061449831973116, + "grad_norm": 0.5755165187513511, + "learning_rate": 3.929436103757671e-06, + 
"loss": 1.2255, + "step": 1595 + }, + { + "epoch": 3.0633701392222754, + "grad_norm": 0.5582108348145326, + "learning_rate": 3.922891129095609e-06, + "loss": 1.1193, + "step": 1596 + }, + { + "epoch": 3.0652904464714354, + "grad_norm": 0.5996086215288114, + "learning_rate": 3.916348089269142e-06, + "loss": 1.2377, + "step": 1597 + }, + { + "epoch": 3.0672107537205955, + "grad_norm": 0.5630731883301092, + "learning_rate": 3.9098069960316805e-06, + "loss": 1.2896, + "step": 1598 + }, + { + "epoch": 3.069131060969755, + "grad_norm": 0.582096728595382, + "learning_rate": 3.903267861133148e-06, + "loss": 1.1979, + "step": 1599 + }, + { + "epoch": 3.071051368218915, + "grad_norm": 0.5504399438416016, + "learning_rate": 3.8967306963199394e-06, + "loss": 1.3029, + "step": 1600 + }, + { + "epoch": 3.0729716754680747, + "grad_norm": 0.563082626367888, + "learning_rate": 3.890195513334916e-06, + "loss": 1.129, + "step": 1601 + }, + { + "epoch": 3.0748919827172347, + "grad_norm": 0.643164857362494, + "learning_rate": 3.8836623239173794e-06, + "loss": 1.16, + "step": 1602 + }, + { + "epoch": 3.0768122899663948, + "grad_norm": 0.6032051324771863, + "learning_rate": 3.877131139803046e-06, + "loss": 1.2969, + "step": 1603 + }, + { + "epoch": 3.0787325972155544, + "grad_norm": 0.6193502711093731, + "learning_rate": 3.870601972724036e-06, + "loss": 1.229, + "step": 1604 + }, + { + "epoch": 3.0806529044647144, + "grad_norm": 0.579708043718428, + "learning_rate": 3.864074834408843e-06, + "loss": 1.0994, + "step": 1605 + }, + { + "epoch": 3.082573211713874, + "grad_norm": 0.5884146793223665, + "learning_rate": 3.8575497365823164e-06, + "loss": 1.1923, + "step": 1606 + }, + { + "epoch": 3.084493518963034, + "grad_norm": 0.6142438769644465, + "learning_rate": 3.851026690965638e-06, + "loss": 1.1717, + "step": 1607 + }, + { + "epoch": 3.086413826212194, + "grad_norm": 0.5424753177028429, + "learning_rate": 3.8445057092763086e-06, + "loss": 1.0365, + "step": 1608 + }, + { + "epoch": 3.0883341334613537, + "grad_norm": 0.4959430365284454, + "learning_rate": 3.8379868032281195e-06, + "loss": 1.0071, + "step": 1609 + }, + { + "epoch": 3.0902544407105137, + "grad_norm": 0.7446978487309537, + "learning_rate": 3.8314699845311295e-06, + "loss": 1.1553, + "step": 1610 + }, + { + "epoch": 3.0921747479596737, + "grad_norm": 0.5998373711991599, + "learning_rate": 3.82495526489165e-06, + "loss": 1.177, + "step": 1611 + }, + { + "epoch": 3.0940950552088333, + "grad_norm": 0.5841397662737307, + "learning_rate": 3.818442656012228e-06, + "loss": 1.269, + "step": 1612 + }, + { + "epoch": 3.0960153624579934, + "grad_norm": 0.5769368937761744, + "learning_rate": 3.8119321695916085e-06, + "loss": 1.0473, + "step": 1613 + }, + { + "epoch": 3.097935669707153, + "grad_norm": 0.5234373926670447, + "learning_rate": 3.8054238173247295e-06, + "loss": 1.2028, + "step": 1614 + }, + { + "epoch": 3.099855976956313, + "grad_norm": 0.6801987866611497, + "learning_rate": 3.7989176109026994e-06, + "loss": 1.2221, + "step": 1615 + }, + { + "epoch": 3.101776284205473, + "grad_norm": 2.413473420503517, + "learning_rate": 3.792413562012761e-06, + "loss": 1.1262, + "step": 1616 + }, + { + "epoch": 3.1036965914546326, + "grad_norm": 0.5599867362671768, + "learning_rate": 3.7859116823382917e-06, + "loss": 1.1987, + "step": 1617 + }, + { + "epoch": 3.1056168987037926, + "grad_norm": 0.6321518965143011, + "learning_rate": 3.7794119835587687e-06, + "loss": 1.4875, + "step": 1618 + }, + { + "epoch": 3.1075372059529527, + "grad_norm": 0.711430130819764, + 
"learning_rate": 3.7729144773497494e-06, + "loss": 1.0819, + "step": 1619 + }, + { + "epoch": 3.1094575132021123, + "grad_norm": 0.5871289275907895, + "learning_rate": 3.7664191753828536e-06, + "loss": 1.2429, + "step": 1620 + }, + { + "epoch": 3.1113778204512723, + "grad_norm": 0.5778937741434451, + "learning_rate": 3.7599260893257467e-06, + "loss": 1.3067, + "step": 1621 + }, + { + "epoch": 3.113298127700432, + "grad_norm": 0.5638744005486488, + "learning_rate": 3.7534352308421075e-06, + "loss": 1.2885, + "step": 1622 + }, + { + "epoch": 3.115218434949592, + "grad_norm": 0.5833646712126005, + "learning_rate": 3.746946611591613e-06, + "loss": 1.1909, + "step": 1623 + }, + { + "epoch": 3.117138742198752, + "grad_norm": 0.6871129305846501, + "learning_rate": 3.740460243229923e-06, + "loss": 1.2821, + "step": 1624 + }, + { + "epoch": 3.1190590494479116, + "grad_norm": 0.5819175805660236, + "learning_rate": 3.7339761374086514e-06, + "loss": 1.3875, + "step": 1625 + }, + { + "epoch": 3.1209793566970716, + "grad_norm": 0.5505725166302563, + "learning_rate": 3.7274943057753455e-06, + "loss": 1.2642, + "step": 1626 + }, + { + "epoch": 3.122899663946231, + "grad_norm": 0.5624663721371538, + "learning_rate": 3.721014759973469e-06, + "loss": 1.165, + "step": 1627 + }, + { + "epoch": 3.1248199711953912, + "grad_norm": 0.6253863089417274, + "learning_rate": 3.7145375116423847e-06, + "loss": 1.1946, + "step": 1628 + }, + { + "epoch": 3.1267402784445513, + "grad_norm": 0.6393973620026608, + "learning_rate": 3.708062572417317e-06, + "loss": 1.2315, + "step": 1629 + }, + { + "epoch": 3.128660585693711, + "grad_norm": 0.5529079619612915, + "learning_rate": 3.701589953929354e-06, + "loss": 1.2356, + "step": 1630 + }, + { + "epoch": 3.130580892942871, + "grad_norm": 0.5643072069737562, + "learning_rate": 3.695119667805409e-06, + "loss": 1.2191, + "step": 1631 + }, + { + "epoch": 3.132501200192031, + "grad_norm": 0.6334348417032338, + "learning_rate": 3.6886517256682053e-06, + "loss": 1.2184, + "step": 1632 + }, + { + "epoch": 3.1344215074411905, + "grad_norm": 0.6201000035518202, + "learning_rate": 3.682186139136258e-06, + "loss": 1.2566, + "step": 1633 + }, + { + "epoch": 3.1363418146903506, + "grad_norm": 0.6006711259882118, + "learning_rate": 3.67572291982385e-06, + "loss": 1.0511, + "step": 1634 + }, + { + "epoch": 3.13826212193951, + "grad_norm": 0.5745444576549662, + "learning_rate": 3.6692620793410133e-06, + "loss": 1.2195, + "step": 1635 + }, + { + "epoch": 3.14018242918867, + "grad_norm": 0.5847999039782024, + "learning_rate": 3.662803629293501e-06, + "loss": 1.0565, + "step": 1636 + }, + { + "epoch": 3.1421027364378302, + "grad_norm": 0.6461746117945114, + "learning_rate": 3.6563475812827796e-06, + "loss": 1.3153, + "step": 1637 + }, + { + "epoch": 3.14402304368699, + "grad_norm": 0.6611208527304534, + "learning_rate": 3.649893946905999e-06, + "loss": 1.2453, + "step": 1638 + }, + { + "epoch": 3.14594335093615, + "grad_norm": 0.5794516840663431, + "learning_rate": 3.6434427377559676e-06, + "loss": 1.0786, + "step": 1639 + }, + { + "epoch": 3.1478636581853094, + "grad_norm": 0.5999942114450999, + "learning_rate": 3.636993965421144e-06, + "loss": 1.2188, + "step": 1640 + }, + { + "epoch": 3.1497839654344695, + "grad_norm": 0.6194386543782767, + "learning_rate": 3.6305476414856088e-06, + "loss": 1.2457, + "step": 1641 + }, + { + "epoch": 3.1517042726836295, + "grad_norm": 0.555263722649073, + "learning_rate": 3.62410377752904e-06, + "loss": 1.2712, + "step": 1642 + }, + { + "epoch": 3.153624579932789, 
+ "grad_norm": 0.5844817475259695, + "learning_rate": 3.6176623851267012e-06, + "loss": 1.2555, + "step": 1643 + }, + { + "epoch": 3.155544887181949, + "grad_norm": 0.5993831370634624, + "learning_rate": 3.6112234758494156e-06, + "loss": 1.2169, + "step": 1644 + }, + { + "epoch": 3.157465194431109, + "grad_norm": 0.5902176968126847, + "learning_rate": 3.6047870612635418e-06, + "loss": 1.1525, + "step": 1645 + }, + { + "epoch": 3.1593855016802688, + "grad_norm": 0.537878083590311, + "learning_rate": 3.5983531529309625e-06, + "loss": 1.1491, + "step": 1646 + }, + { + "epoch": 3.161305808929429, + "grad_norm": 0.5972988816949006, + "learning_rate": 3.5919217624090557e-06, + "loss": 1.1015, + "step": 1647 + }, + { + "epoch": 3.1632261161785884, + "grad_norm": 0.5629669162740835, + "learning_rate": 3.5854929012506788e-06, + "loss": 1.1488, + "step": 1648 + }, + { + "epoch": 3.1651464234277484, + "grad_norm": 0.610082594427042, + "learning_rate": 3.5790665810041403e-06, + "loss": 1.2249, + "step": 1649 + }, + { + "epoch": 3.1670667306769085, + "grad_norm": 0.5041376206852936, + "learning_rate": 3.5726428132131902e-06, + "loss": 1.1544, + "step": 1650 + }, + { + "epoch": 3.168987037926068, + "grad_norm": 0.5288827095379369, + "learning_rate": 3.566221609416993e-06, + "loss": 1.1402, + "step": 1651 + }, + { + "epoch": 3.170907345175228, + "grad_norm": 0.6227424068796852, + "learning_rate": 3.559802981150102e-06, + "loss": 1.2278, + "step": 1652 + }, + { + "epoch": 3.172827652424388, + "grad_norm": 0.6102457355287448, + "learning_rate": 3.553386939942448e-06, + "loss": 1.2731, + "step": 1653 + }, + { + "epoch": 3.1747479596735477, + "grad_norm": 0.634257996410596, + "learning_rate": 3.546973497319319e-06, + "loss": 1.2311, + "step": 1654 + }, + { + "epoch": 3.1766682669227078, + "grad_norm": 0.5388738431455334, + "learning_rate": 3.540562664801326e-06, + "loss": 1.2117, + "step": 1655 + }, + { + "epoch": 3.1785885741718674, + "grad_norm": 0.6531673100230599, + "learning_rate": 3.534154453904396e-06, + "loss": 1.319, + "step": 1656 + }, + { + "epoch": 3.1805088814210274, + "grad_norm": 0.5849292721730915, + "learning_rate": 3.5277488761397473e-06, + "loss": 1.2522, + "step": 1657 + }, + { + "epoch": 3.1824291886701874, + "grad_norm": 0.5510644299625856, + "learning_rate": 3.5213459430138697e-06, + "loss": 1.0965, + "step": 1658 + }, + { + "epoch": 3.184349495919347, + "grad_norm": 0.5685397687399799, + "learning_rate": 3.5149456660284974e-06, + "loss": 1.2646, + "step": 1659 + }, + { + "epoch": 3.186269803168507, + "grad_norm": 0.5679326815521467, + "learning_rate": 3.5085480566805963e-06, + "loss": 1.1824, + "step": 1660 + }, + { + "epoch": 3.1881901104176666, + "grad_norm": 0.6423989065041207, + "learning_rate": 3.5021531264623425e-06, + "loss": 1.2525, + "step": 1661 + }, + { + "epoch": 3.1901104176668267, + "grad_norm": 0.637048957903663, + "learning_rate": 3.495760886861093e-06, + "loss": 1.326, + "step": 1662 + }, + { + "epoch": 3.1920307249159867, + "grad_norm": 0.5765276082947753, + "learning_rate": 3.4893713493593793e-06, + "loss": 1.3629, + "step": 1663 + }, + { + "epoch": 3.1939510321651463, + "grad_norm": 0.5826044555587773, + "learning_rate": 3.482984525434876e-06, + "loss": 1.1853, + "step": 1664 + }, + { + "epoch": 3.1958713394143063, + "grad_norm": 0.6214006280883043, + "learning_rate": 3.476600426560379e-06, + "loss": 1.298, + "step": 1665 + }, + { + "epoch": 3.1977916466634664, + "grad_norm": 0.593500861887748, + "learning_rate": 3.470219064203795e-06, + "loss": 1.2198, + "step": 
1666 + }, + { + "epoch": 3.199711953912626, + "grad_norm": 0.5354801643665571, + "learning_rate": 3.4638404498281143e-06, + "loss": 1.4108, + "step": 1667 + }, + { + "epoch": 3.201632261161786, + "grad_norm": 0.5665263533752104, + "learning_rate": 3.4574645948913866e-06, + "loss": 1.1645, + "step": 1668 + }, + { + "epoch": 3.2035525684109456, + "grad_norm": 0.5852075208291369, + "learning_rate": 3.4510915108467104e-06, + "loss": 1.1839, + "step": 1669 + }, + { + "epoch": 3.2054728756601056, + "grad_norm": 0.5671818908915182, + "learning_rate": 3.444721209142201e-06, + "loss": 1.2058, + "step": 1670 + }, + { + "epoch": 3.2073931829092657, + "grad_norm": 0.5781614765989507, + "learning_rate": 3.438353701220983e-06, + "loss": 1.0971, + "step": 1671 + }, + { + "epoch": 3.2093134901584253, + "grad_norm": 0.5386223485831203, + "learning_rate": 3.431988998521155e-06, + "loss": 1.3075, + "step": 1672 + }, + { + "epoch": 3.2112337974075853, + "grad_norm": 0.5732210092305149, + "learning_rate": 3.4256271124757788e-06, + "loss": 1.0875, + "step": 1673 + }, + { + "epoch": 3.213154104656745, + "grad_norm": 0.5869267037628796, + "learning_rate": 3.4192680545128636e-06, + "loss": 1.3767, + "step": 1674 + }, + { + "epoch": 3.215074411905905, + "grad_norm": 0.5325613229717368, + "learning_rate": 3.412911836055325e-06, + "loss": 1.3076, + "step": 1675 + }, + { + "epoch": 3.216994719155065, + "grad_norm": 0.565442512250459, + "learning_rate": 3.4065584685209895e-06, + "loss": 1.2278, + "step": 1676 + }, + { + "epoch": 3.2189150264042246, + "grad_norm": 0.6170263179158518, + "learning_rate": 3.400207963322558e-06, + "loss": 1.2688, + "step": 1677 + }, + { + "epoch": 3.2208353336533846, + "grad_norm": 0.5565948749888939, + "learning_rate": 3.3938603318675888e-06, + "loss": 1.1383, + "step": 1678 + }, + { + "epoch": 3.2227556409025446, + "grad_norm": 0.5876548993991542, + "learning_rate": 3.3875155855584783e-06, + "loss": 1.2766, + "step": 1679 + }, + { + "epoch": 3.224675948151704, + "grad_norm": 0.6191645289657872, + "learning_rate": 3.381173735792445e-06, + "loss": 1.3438, + "step": 1680 + }, + { + "epoch": 3.2265962554008643, + "grad_norm": 0.6074559127636022, + "learning_rate": 3.374834793961497e-06, + "loss": 1.2056, + "step": 1681 + }, + { + "epoch": 3.228516562650024, + "grad_norm": 0.732680771432751, + "learning_rate": 3.368498771452422e-06, + "loss": 1.3393, + "step": 1682 + }, + { + "epoch": 3.230436869899184, + "grad_norm": 0.6358066065023189, + "learning_rate": 3.3621656796467663e-06, + "loss": 1.244, + "step": 1683 + }, + { + "epoch": 3.232357177148344, + "grad_norm": 0.5515238109498135, + "learning_rate": 3.355835529920808e-06, + "loss": 1.2092, + "step": 1684 + }, + { + "epoch": 3.2342774843975035, + "grad_norm": 0.6562888547491579, + "learning_rate": 3.3495083336455415e-06, + "loss": 1.2785, + "step": 1685 + }, + { + "epoch": 3.2361977916466635, + "grad_norm": 0.5545700372761027, + "learning_rate": 3.3431841021866553e-06, + "loss": 1.2815, + "step": 1686 + }, + { + "epoch": 3.238118098895823, + "grad_norm": 0.524298548791096, + "learning_rate": 3.3368628469045163e-06, + "loss": 1.1526, + "step": 1687 + }, + { + "epoch": 3.240038406144983, + "grad_norm": 0.6912927841649699, + "learning_rate": 3.330544579154135e-06, + "loss": 1.2902, + "step": 1688 + }, + { + "epoch": 3.241958713394143, + "grad_norm": 0.5097802435995797, + "learning_rate": 3.3242293102851673e-06, + "loss": 1.207, + "step": 1689 + }, + { + "epoch": 3.243879020643303, + "grad_norm": 0.5961740090549886, + "learning_rate": 
3.3179170516418766e-06, + "loss": 1.1062, + "step": 1690 + }, + { + "epoch": 3.245799327892463, + "grad_norm": 0.6328813662130345, + "learning_rate": 3.3116078145631176e-06, + "loss": 1.2825, + "step": 1691 + }, + { + "epoch": 3.247719635141623, + "grad_norm": 0.5253530576460194, + "learning_rate": 3.3053016103823177e-06, + "loss": 1.2221, + "step": 1692 + }, + { + "epoch": 3.2496399423907825, + "grad_norm": 0.5367243715039445, + "learning_rate": 3.298998450427461e-06, + "loss": 1.1638, + "step": 1693 + }, + { + "epoch": 3.2515602496399425, + "grad_norm": 0.5993110147361436, + "learning_rate": 3.2926983460210564e-06, + "loss": 1.1667, + "step": 1694 + }, + { + "epoch": 3.253480556889102, + "grad_norm": 0.6207172496829787, + "learning_rate": 3.2864013084801284e-06, + "loss": 1.2983, + "step": 1695 + }, + { + "epoch": 3.255400864138262, + "grad_norm": 0.587033430998679, + "learning_rate": 3.280107349116191e-06, + "loss": 1.2601, + "step": 1696 + }, + { + "epoch": 3.257321171387422, + "grad_norm": 0.5575283763975588, + "learning_rate": 3.2738164792352313e-06, + "loss": 1.047, + "step": 1697 + }, + { + "epoch": 3.2592414786365818, + "grad_norm": 0.6111926828335472, + "learning_rate": 3.2675287101376816e-06, + "loss": 1.2537, + "step": 1698 + }, + { + "epoch": 3.261161785885742, + "grad_norm": 0.5996719274287939, + "learning_rate": 3.2612440531184066e-06, + "loss": 1.211, + "step": 1699 + }, + { + "epoch": 3.2630820931349014, + "grad_norm": 0.5307610081118879, + "learning_rate": 3.254962519466686e-06, + "loss": 1.1324, + "step": 1700 + }, + { + "epoch": 3.2650024003840614, + "grad_norm": 0.5701255799064411, + "learning_rate": 3.248684120466178e-06, + "loss": 1.2563, + "step": 1701 + }, + { + "epoch": 3.2669227076332215, + "grad_norm": 0.5972422198489592, + "learning_rate": 3.2424088673949195e-06, + "loss": 1.3352, + "step": 1702 + }, + { + "epoch": 3.268843014882381, + "grad_norm": 0.5866893251323804, + "learning_rate": 3.236136771525293e-06, + "loss": 1.2768, + "step": 1703 + }, + { + "epoch": 3.270763322131541, + "grad_norm": 0.5475694707088852, + "learning_rate": 3.229867844124006e-06, + "loss": 1.0844, + "step": 1704 + }, + { + "epoch": 3.272683629380701, + "grad_norm": 0.6269793925403148, + "learning_rate": 3.223602096452082e-06, + "loss": 1.2475, + "step": 1705 + }, + { + "epoch": 3.2746039366298607, + "grad_norm": 0.5883008394098636, + "learning_rate": 3.217339539764829e-06, + "loss": 1.3549, + "step": 1706 + }, + { + "epoch": 3.2765242438790207, + "grad_norm": 0.6289915199869982, + "learning_rate": 3.211080185311818e-06, + "loss": 1.2237, + "step": 1707 + }, + { + "epoch": 3.2784445511281803, + "grad_norm": 0.5302237117536894, + "learning_rate": 3.2048240443368745e-06, + "loss": 1.1993, + "step": 1708 + }, + { + "epoch": 3.2803648583773404, + "grad_norm": 0.6318413679037195, + "learning_rate": 3.1985711280780507e-06, + "loss": 1.2809, + "step": 1709 + }, + { + "epoch": 3.2822851656265004, + "grad_norm": 0.5833199543416004, + "learning_rate": 3.1923214477676044e-06, + "loss": 1.1388, + "step": 1710 + }, + { + "epoch": 3.28420547287566, + "grad_norm": 0.6159472717533656, + "learning_rate": 3.18607501463198e-06, + "loss": 1.3148, + "step": 1711 + }, + { + "epoch": 3.28612578012482, + "grad_norm": 0.7033210591341861, + "learning_rate": 3.179831839891788e-06, + "loss": 1.1797, + "step": 1712 + }, + { + "epoch": 3.2880460873739796, + "grad_norm": 0.5623958162691391, + "learning_rate": 3.1735919347617936e-06, + "loss": 1.1396, + "step": 1713 + }, + { + "epoch": 3.2899663946231397, + 
"grad_norm": 0.6051741936134082, + "learning_rate": 3.167355310450877e-06, + "loss": 1.2096, + "step": 1714 + }, + { + "epoch": 3.2918867018722997, + "grad_norm": 0.6081352201568412, + "learning_rate": 3.161121978162034e-06, + "loss": 1.3922, + "step": 1715 + }, + { + "epoch": 3.2938070091214593, + "grad_norm": 0.6360485189663261, + "learning_rate": 3.1548919490923422e-06, + "loss": 1.4821, + "step": 1716 + }, + { + "epoch": 3.2957273163706193, + "grad_norm": 0.5957816375794027, + "learning_rate": 3.1486652344329464e-06, + "loss": 1.123, + "step": 1717 + }, + { + "epoch": 3.2976476236197794, + "grad_norm": 0.5507547376945735, + "learning_rate": 3.1424418453690402e-06, + "loss": 1.1823, + "step": 1718 + }, + { + "epoch": 3.299567930868939, + "grad_norm": 0.5653398379962096, + "learning_rate": 3.1362217930798412e-06, + "loss": 1.041, + "step": 1719 + }, + { + "epoch": 3.301488238118099, + "grad_norm": 0.5496605395195252, + "learning_rate": 3.130005088738572e-06, + "loss": 1.073, + "step": 1720 + }, + { + "epoch": 3.3034085453672586, + "grad_norm": 0.602998751247224, + "learning_rate": 3.123791743512442e-06, + "loss": 1.1657, + "step": 1721 + }, + { + "epoch": 3.3053288526164186, + "grad_norm": 0.522135676757074, + "learning_rate": 3.1175817685626285e-06, + "loss": 1.3435, + "step": 1722 + }, + { + "epoch": 3.3072491598655787, + "grad_norm": 0.6154486358013063, + "learning_rate": 3.1113751750442543e-06, + "loss": 1.3508, + "step": 1723 + }, + { + "epoch": 3.3091694671147383, + "grad_norm": 0.5819941923043938, + "learning_rate": 3.1051719741063646e-06, + "loss": 1.2033, + "step": 1724 + }, + { + "epoch": 3.3110897743638983, + "grad_norm": 0.7023278811799296, + "learning_rate": 3.0989721768919136e-06, + "loss": 1.2856, + "step": 1725 + }, + { + "epoch": 3.313010081613058, + "grad_norm": 0.5643501551633457, + "learning_rate": 3.0927757945377413e-06, + "loss": 1.1447, + "step": 1726 + }, + { + "epoch": 3.314930388862218, + "grad_norm": 0.5942252124274704, + "learning_rate": 3.0865828381745515e-06, + "loss": 1.3207, + "step": 1727 + }, + { + "epoch": 3.316850696111378, + "grad_norm": 0.633150171321691, + "learning_rate": 3.0803933189268966e-06, + "loss": 1.1637, + "step": 1728 + }, + { + "epoch": 3.3187710033605375, + "grad_norm": 0.5790980766279281, + "learning_rate": 3.074207247913152e-06, + "loss": 1.1489, + "step": 1729 + }, + { + "epoch": 3.3206913106096976, + "grad_norm": 0.5340726545206791, + "learning_rate": 3.068024636245499e-06, + "loss": 1.2081, + "step": 1730 + }, + { + "epoch": 3.3226116178588576, + "grad_norm": 0.5953408126229657, + "learning_rate": 3.061845495029909e-06, + "loss": 1.1497, + "step": 1731 + }, + { + "epoch": 3.324531925108017, + "grad_norm": 0.5845564231744579, + "learning_rate": 3.055669835366116e-06, + "loss": 1.1156, + "step": 1732 + }, + { + "epoch": 3.3264522323571772, + "grad_norm": 0.576763529181793, + "learning_rate": 3.0494976683475984e-06, + "loss": 1.1608, + "step": 1733 + }, + { + "epoch": 3.328372539606337, + "grad_norm": 0.5285540758625484, + "learning_rate": 3.0433290050615626e-06, + "loss": 1.2116, + "step": 1734 + }, + { + "epoch": 3.330292846855497, + "grad_norm": 0.5515202072231463, + "learning_rate": 3.037163856588924e-06, + "loss": 1.1396, + "step": 1735 + }, + { + "epoch": 3.332213154104657, + "grad_norm": 0.6189898207857463, + "learning_rate": 3.0310022340042798e-06, + "loss": 1.1316, + "step": 1736 + }, + { + "epoch": 3.3341334613538165, + "grad_norm": 0.6728456667516306, + "learning_rate": 3.024844148375895e-06, + "loss": 1.2676, + "step": 
1737 + }, + { + "epoch": 3.3360537686029765, + "grad_norm": 0.5874686885325532, + "learning_rate": 3.0186896107656803e-06, + "loss": 1.1275, + "step": 1738 + }, + { + "epoch": 3.337974075852136, + "grad_norm": 0.6051629261993252, + "learning_rate": 3.0125386322291774e-06, + "loss": 1.2689, + "step": 1739 + }, + { + "epoch": 3.339894383101296, + "grad_norm": 0.5961589354815949, + "learning_rate": 3.006391223815528e-06, + "loss": 1.1563, + "step": 1740 + }, + { + "epoch": 3.341814690350456, + "grad_norm": 0.5502748330311173, + "learning_rate": 3.000247396567466e-06, + "loss": 1.3951, + "step": 1741 + }, + { + "epoch": 3.343734997599616, + "grad_norm": 0.5919263458022708, + "learning_rate": 2.9941071615212906e-06, + "loss": 1.1641, + "step": 1742 + }, + { + "epoch": 3.345655304848776, + "grad_norm": 0.5531386125944471, + "learning_rate": 2.987970529706846e-06, + "loss": 1.2821, + "step": 1743 + }, + { + "epoch": 3.347575612097936, + "grad_norm": 0.6933860898885489, + "learning_rate": 2.9818375121475084e-06, + "loss": 1.1853, + "step": 1744 + }, + { + "epoch": 3.3494959193470955, + "grad_norm": 0.5512778920328577, + "learning_rate": 2.975708119860159e-06, + "loss": 1.1247, + "step": 1745 + }, + { + "epoch": 3.3514162265962555, + "grad_norm": 0.585163704258437, + "learning_rate": 2.9695823638551657e-06, + "loss": 1.2356, + "step": 1746 + }, + { + "epoch": 3.353336533845415, + "grad_norm": 0.5406824628772507, + "learning_rate": 2.9634602551363647e-06, + "loss": 1.1759, + "step": 1747 + }, + { + "epoch": 3.355256841094575, + "grad_norm": 0.6697192520786999, + "learning_rate": 2.9573418047010448e-06, + "loss": 1.2201, + "step": 1748 + }, + { + "epoch": 3.357177148343735, + "grad_norm": 0.6441662569968425, + "learning_rate": 2.9512270235399197e-06, + "loss": 1.2409, + "step": 1749 + }, + { + "epoch": 3.3590974555928947, + "grad_norm": 0.6327406641413981, + "learning_rate": 2.9451159226371097e-06, + "loss": 1.2978, + "step": 1750 + }, + { + "epoch": 3.361017762842055, + "grad_norm": 0.5502844928472023, + "learning_rate": 2.9390085129701277e-06, + "loss": 1.1181, + "step": 1751 + }, + { + "epoch": 3.3629380700912144, + "grad_norm": 0.6494500131460058, + "learning_rate": 2.9329048055098582e-06, + "loss": 1.3404, + "step": 1752 + }, + { + "epoch": 3.3648583773403744, + "grad_norm": 0.5103913968704721, + "learning_rate": 2.926804811220528e-06, + "loss": 1.1891, + "step": 1753 + }, + { + "epoch": 3.3667786845895344, + "grad_norm": 0.5700937549996999, + "learning_rate": 2.9207085410596987e-06, + "loss": 1.1803, + "step": 1754 + }, + { + "epoch": 3.368698991838694, + "grad_norm": 0.5860129420661921, + "learning_rate": 2.9146160059782446e-06, + "loss": 1.1368, + "step": 1755 + }, + { + "epoch": 3.370619299087854, + "grad_norm": 0.5847039078714579, + "learning_rate": 2.908527216920325e-06, + "loss": 1.1415, + "step": 1756 + }, + { + "epoch": 3.372539606337014, + "grad_norm": 0.5844617302983922, + "learning_rate": 2.902442184823372e-06, + "loss": 1.2324, + "step": 1757 + }, + { + "epoch": 3.3744599135861737, + "grad_norm": 0.5993142900180137, + "learning_rate": 2.8963609206180715e-06, + "loss": 1.1662, + "step": 1758 + }, + { + "epoch": 3.3763802208353337, + "grad_norm": 0.6172218061313731, + "learning_rate": 2.8902834352283366e-06, + "loss": 1.2897, + "step": 1759 + }, + { + "epoch": 3.3783005280844933, + "grad_norm": 0.5635106967910548, + "learning_rate": 2.884209739571299e-06, + "loss": 1.284, + "step": 1760 + }, + { + "epoch": 3.3802208353336534, + "grad_norm": 0.5587500964622583, + "learning_rate": 
2.8781398445572733e-06, + "loss": 1.1566, + "step": 1761 + }, + { + "epoch": 3.3821411425828134, + "grad_norm": 0.5390972919683255, + "learning_rate": 2.8720737610897575e-06, + "loss": 1.1133, + "step": 1762 + }, + { + "epoch": 3.384061449831973, + "grad_norm": 0.5135538944195657, + "learning_rate": 2.8660115000653943e-06, + "loss": 1.1121, + "step": 1763 + }, + { + "epoch": 3.385981757081133, + "grad_norm": 0.5811976220286682, + "learning_rate": 2.8599530723739673e-06, + "loss": 1.2243, + "step": 1764 + }, + { + "epoch": 3.3879020643302926, + "grad_norm": 0.5866524311107482, + "learning_rate": 2.8538984888983673e-06, + "loss": 1.2698, + "step": 1765 + }, + { + "epoch": 3.3898223715794527, + "grad_norm": 0.6050830877127573, + "learning_rate": 2.8478477605145815e-06, + "loss": 1.2486, + "step": 1766 + }, + { + "epoch": 3.3917426788286127, + "grad_norm": 0.584736325025134, + "learning_rate": 2.8418008980916758e-06, + "loss": 1.1104, + "step": 1767 + }, + { + "epoch": 3.3936629860777723, + "grad_norm": 0.550413486392883, + "learning_rate": 2.8357579124917694e-06, + "loss": 1.08, + "step": 1768 + }, + { + "epoch": 3.3955832933269323, + "grad_norm": 0.5599087825633609, + "learning_rate": 2.8297188145700172e-06, + "loss": 1.2269, + "step": 1769 + }, + { + "epoch": 3.3975036005760924, + "grad_norm": 0.5965627660286632, + "learning_rate": 2.823683615174587e-06, + "loss": 1.2645, + "step": 1770 + }, + { + "epoch": 3.399423907825252, + "grad_norm": 0.6357006981125833, + "learning_rate": 2.8176523251466526e-06, + "loss": 1.171, + "step": 1771 + }, + { + "epoch": 3.401344215074412, + "grad_norm": 0.5442537962308716, + "learning_rate": 2.811624955320356e-06, + "loss": 1.2498, + "step": 1772 + }, + { + "epoch": 3.403264522323572, + "grad_norm": 0.7011460563709533, + "learning_rate": 2.805601516522802e-06, + "loss": 1.1573, + "step": 1773 + }, + { + "epoch": 3.4051848295727316, + "grad_norm": 0.5870440174468419, + "learning_rate": 2.799582019574033e-06, + "loss": 1.1148, + "step": 1774 + }, + { + "epoch": 3.4071051368218916, + "grad_norm": 0.5614754253823548, + "learning_rate": 2.7935664752870127e-06, + "loss": 1.2116, + "step": 1775 + }, + { + "epoch": 3.4090254440710512, + "grad_norm": 0.7147183204726885, + "learning_rate": 2.787554894467599e-06, + "loss": 1.2903, + "step": 1776 + }, + { + "epoch": 3.4109457513202113, + "grad_norm": 0.5513142660597017, + "learning_rate": 2.7815472879145377e-06, + "loss": 1.149, + "step": 1777 + }, + { + "epoch": 3.412866058569371, + "grad_norm": 0.6656459616701447, + "learning_rate": 2.7755436664194293e-06, + "loss": 1.2103, + "step": 1778 + }, + { + "epoch": 3.414786365818531, + "grad_norm": 0.5704495849887334, + "learning_rate": 2.7695440407667164e-06, + "loss": 1.2483, + "step": 1779 + }, + { + "epoch": 3.416706673067691, + "grad_norm": 0.5670744266192271, + "learning_rate": 2.7635484217336666e-06, + "loss": 1.1921, + "step": 1780 + }, + { + "epoch": 3.4186269803168505, + "grad_norm": 0.6578350609128759, + "learning_rate": 2.7575568200903523e-06, + "loss": 1.1625, + "step": 1781 + }, + { + "epoch": 3.4205472875660106, + "grad_norm": 0.6384902789953107, + "learning_rate": 2.7515692465996236e-06, + "loss": 1.0885, + "step": 1782 + }, + { + "epoch": 3.4224675948151706, + "grad_norm": 0.6244278971714546, + "learning_rate": 2.7455857120170947e-06, + "loss": 1.4407, + "step": 1783 + }, + { + "epoch": 3.42438790206433, + "grad_norm": 0.5735407000960205, + "learning_rate": 2.739606227091132e-06, + "loss": 1.1879, + "step": 1784 + }, + { + "epoch": 3.4263082093134902, + 
"grad_norm": 0.5874614013907964, + "learning_rate": 2.7336308025628188e-06, + "loss": 1.1074, + "step": 1785 + }, + { + "epoch": 3.4282285165626503, + "grad_norm": 0.6062877321461567, + "learning_rate": 2.7276594491659523e-06, + "loss": 1.1385, + "step": 1786 + }, + { + "epoch": 3.43014882381181, + "grad_norm": 0.5331184684052833, + "learning_rate": 2.721692177627009e-06, + "loss": 1.138, + "step": 1787 + }, + { + "epoch": 3.43206913106097, + "grad_norm": 0.5578128775962742, + "learning_rate": 2.7157289986651403e-06, + "loss": 1.1855, + "step": 1788 + }, + { + "epoch": 3.4339894383101295, + "grad_norm": 0.59622526558422, + "learning_rate": 2.7097699229921393e-06, + "loss": 1.074, + "step": 1789 + }, + { + "epoch": 3.4359097455592895, + "grad_norm": 0.5347215870463632, + "learning_rate": 2.703814961312433e-06, + "loss": 1.2673, + "step": 1790 + }, + { + "epoch": 3.437830052808449, + "grad_norm": 0.6013837489481205, + "learning_rate": 2.697864124323061e-06, + "loss": 1.1553, + "step": 1791 + }, + { + "epoch": 3.439750360057609, + "grad_norm": 0.5712744686825466, + "learning_rate": 2.6919174227136417e-06, + "loss": 1.0525, + "step": 1792 + }, + { + "epoch": 3.441670667306769, + "grad_norm": 0.5434860911629172, + "learning_rate": 2.6859748671663765e-06, + "loss": 1.0182, + "step": 1793 + }, + { + "epoch": 3.4435909745559288, + "grad_norm": 0.5405711766310275, + "learning_rate": 2.680036468356018e-06, + "loss": 1.0745, + "step": 1794 + }, + { + "epoch": 3.445511281805089, + "grad_norm": 0.5240888758779071, + "learning_rate": 2.674102236949847e-06, + "loss": 1.0571, + "step": 1795 + }, + { + "epoch": 3.447431589054249, + "grad_norm": 0.5632915255440348, + "learning_rate": 2.668172183607659e-06, + "loss": 1.2534, + "step": 1796 + }, + { + "epoch": 3.4493518963034084, + "grad_norm": 0.601652903134989, + "learning_rate": 2.6622463189817504e-06, + "loss": 1.1879, + "step": 1797 + }, + { + "epoch": 3.4512722035525685, + "grad_norm": 0.5663578668634494, + "learning_rate": 2.656324653716884e-06, + "loss": 1.2605, + "step": 1798 + }, + { + "epoch": 3.4531925108017285, + "grad_norm": 0.5314383247058302, + "learning_rate": 2.6504071984502867e-06, + "loss": 1.0559, + "step": 1799 + }, + { + "epoch": 3.455112818050888, + "grad_norm": 0.6171911374157446, + "learning_rate": 2.6444939638116224e-06, + "loss": 1.2559, + "step": 1800 + }, + { + "epoch": 3.457033125300048, + "grad_norm": 0.5890322429837382, + "learning_rate": 2.638584960422969e-06, + "loss": 1.1613, + "step": 1801 + }, + { + "epoch": 3.4589534325492077, + "grad_norm": 0.6428950321974103, + "learning_rate": 2.632680198898805e-06, + "loss": 1.2215, + "step": 1802 + }, + { + "epoch": 3.4608737397983678, + "grad_norm": 0.46766602284721387, + "learning_rate": 2.6267796898459906e-06, + "loss": 1.2254, + "step": 1803 + }, + { + "epoch": 3.4627940470475274, + "grad_norm": 0.5371655431627329, + "learning_rate": 2.6208834438637525e-06, + "loss": 1.0896, + "step": 1804 + }, + { + "epoch": 3.4647143542966874, + "grad_norm": 0.5760093163129807, + "learning_rate": 2.614991471543645e-06, + "loss": 1.1811, + "step": 1805 + }, + { + "epoch": 3.4666346615458474, + "grad_norm": 0.567402786950384, + "learning_rate": 2.6091037834695582e-06, + "loss": 1.1333, + "step": 1806 + }, + { + "epoch": 3.468554968795007, + "grad_norm": 0.6133559259960844, + "learning_rate": 2.6032203902176845e-06, + "loss": 1.1648, + "step": 1807 + }, + { + "epoch": 3.470475276044167, + "grad_norm": 0.621034917980622, + "learning_rate": 2.597341302356495e-06, + "loss": 1.3021, + "step": 1808 + 
}, + { + "epoch": 3.472395583293327, + "grad_norm": 0.5144163325044085, + "learning_rate": 2.5914665304467366e-06, + "loss": 1.3316, + "step": 1809 + }, + { + "epoch": 3.4743158905424867, + "grad_norm": 0.5561534220585199, + "learning_rate": 2.5855960850413936e-06, + "loss": 1.3548, + "step": 1810 + }, + { + "epoch": 3.4762361977916467, + "grad_norm": 0.5447217704452014, + "learning_rate": 2.5797299766856867e-06, + "loss": 1.1471, + "step": 1811 + }, + { + "epoch": 3.4781565050408068, + "grad_norm": 0.5976405621436739, + "learning_rate": 2.573868215917037e-06, + "loss": 1.1872, + "step": 1812 + }, + { + "epoch": 3.4800768122899663, + "grad_norm": 0.6441746506243241, + "learning_rate": 2.5680108132650666e-06, + "loss": 1.2627, + "step": 1813 + }, + { + "epoch": 3.4819971195391264, + "grad_norm": 0.47131158338471724, + "learning_rate": 2.562157779251561e-06, + "loss": 1.1766, + "step": 1814 + }, + { + "epoch": 3.483917426788286, + "grad_norm": 0.5475551237441895, + "learning_rate": 2.556309124390458e-06, + "loss": 1.179, + "step": 1815 + }, + { + "epoch": 3.485837734037446, + "grad_norm": 0.5249350552465734, + "learning_rate": 2.5504648591878356e-06, + "loss": 1.1162, + "step": 1816 + }, + { + "epoch": 3.487758041286606, + "grad_norm": 0.6001018489992662, + "learning_rate": 2.5446249941418844e-06, + "loss": 1.3092, + "step": 1817 + }, + { + "epoch": 3.4896783485357656, + "grad_norm": 0.557093934938618, + "learning_rate": 2.5387895397428818e-06, + "loss": 1.4397, + "step": 1818 + }, + { + "epoch": 3.4915986557849257, + "grad_norm": 0.5655620118747364, + "learning_rate": 2.5329585064731943e-06, + "loss": 1.2571, + "step": 1819 + }, + { + "epoch": 3.4935189630340853, + "grad_norm": 0.49755615268955417, + "learning_rate": 2.527131904807244e-06, + "loss": 1.2623, + "step": 1820 + }, + { + "epoch": 3.4954392702832453, + "grad_norm": 0.5292452178292242, + "learning_rate": 2.5213097452114855e-06, + "loss": 1.2644, + "step": 1821 + }, + { + "epoch": 3.4973595775324053, + "grad_norm": 0.6492521097438952, + "learning_rate": 2.5154920381444026e-06, + "loss": 1.3385, + "step": 1822 + }, + { + "epoch": 3.499279884781565, + "grad_norm": 0.5680825439750705, + "learning_rate": 2.5096787940564736e-06, + "loss": 1.1441, + "step": 1823 + }, + { + "epoch": 3.501200192030725, + "grad_norm": 0.5171652106285989, + "learning_rate": 2.5038700233901684e-06, + "loss": 1.1449, + "step": 1824 + }, + { + "epoch": 3.503120499279885, + "grad_norm": 0.564220902443786, + "learning_rate": 2.498065736579911e-06, + "loss": 1.135, + "step": 1825 + }, + { + "epoch": 3.5050408065290446, + "grad_norm": 0.564711509194194, + "learning_rate": 2.4922659440520806e-06, + "loss": 1.2659, + "step": 1826 + }, + { + "epoch": 3.5069611137782046, + "grad_norm": 0.5746527631170961, + "learning_rate": 2.4864706562249763e-06, + "loss": 1.1533, + "step": 1827 + }, + { + "epoch": 3.5088814210273642, + "grad_norm": 0.5949206702470733, + "learning_rate": 2.4806798835088066e-06, + "loss": 1.159, + "step": 1828 + }, + { + "epoch": 3.5108017282765243, + "grad_norm": 0.637494866240929, + "learning_rate": 2.4748936363056713e-06, + "loss": 1.3793, + "step": 1829 + }, + { + "epoch": 3.512722035525684, + "grad_norm": 0.6391755598961191, + "learning_rate": 2.4691119250095437e-06, + "loss": 1.2987, + "step": 1830 + }, + { + "epoch": 3.514642342774844, + "grad_norm": 0.5227448277347493, + "learning_rate": 2.4633347600062423e-06, + "loss": 1.145, + "step": 1831 + }, + { + "epoch": 3.516562650024004, + "grad_norm": 0.5887052727669172, + "learning_rate": 
2.457562151673421e-06, + "loss": 1.3117, + "step": 1832 + }, + { + "epoch": 3.5184829572731635, + "grad_norm": 0.566594470628361, + "learning_rate": 2.4517941103805537e-06, + "loss": 1.1622, + "step": 1833 + }, + { + "epoch": 3.5204032645223235, + "grad_norm": 0.5586179539205196, + "learning_rate": 2.4460306464889023e-06, + "loss": 1.2178, + "step": 1834 + }, + { + "epoch": 3.5223235717714836, + "grad_norm": 0.5544140723059058, + "learning_rate": 2.440271770351514e-06, + "loss": 1.2272, + "step": 1835 + }, + { + "epoch": 3.524243879020643, + "grad_norm": 0.6089781358442925, + "learning_rate": 2.434517492313188e-06, + "loss": 1.1544, + "step": 1836 + }, + { + "epoch": 3.526164186269803, + "grad_norm": 0.5943704926511746, + "learning_rate": 2.4287678227104712e-06, + "loss": 1.1904, + "step": 1837 + }, + { + "epoch": 3.5280844935189632, + "grad_norm": 0.5421525541653088, + "learning_rate": 2.4230227718716236e-06, + "loss": 1.1347, + "step": 1838 + }, + { + "epoch": 3.530004800768123, + "grad_norm": 0.6670135908993761, + "learning_rate": 2.4172823501166172e-06, + "loss": 1.3405, + "step": 1839 + }, + { + "epoch": 3.531925108017283, + "grad_norm": 0.5377201974084741, + "learning_rate": 2.4115465677571028e-06, + "loss": 1.1601, + "step": 1840 + }, + { + "epoch": 3.5338454152664425, + "grad_norm": 0.5913486192212254, + "learning_rate": 2.4058154350963963e-06, + "loss": 1.2524, + "step": 1841 + }, + { + "epoch": 3.5357657225156025, + "grad_norm": 0.5374332336419092, + "learning_rate": 2.4000889624294665e-06, + "loss": 1.099, + "step": 1842 + }, + { + "epoch": 3.537686029764762, + "grad_norm": 0.5887745224211608, + "learning_rate": 2.394367160042911e-06, + "loss": 1.1857, + "step": 1843 + }, + { + "epoch": 3.539606337013922, + "grad_norm": 0.5516206172056591, + "learning_rate": 2.388650038214933e-06, + "loss": 1.2563, + "step": 1844 + }, + { + "epoch": 3.541526644263082, + "grad_norm": 0.574263565548255, + "learning_rate": 2.382937607215329e-06, + "loss": 1.2654, + "step": 1845 + }, + { + "epoch": 3.5434469515122418, + "grad_norm": 0.5427987503745795, + "learning_rate": 2.377229877305476e-06, + "loss": 1.1811, + "step": 1846 + }, + { + "epoch": 3.545367258761402, + "grad_norm": 0.49740228651330515, + "learning_rate": 2.3715268587382967e-06, + "loss": 1.1092, + "step": 1847 + }, + { + "epoch": 3.547287566010562, + "grad_norm": 0.5993609755655924, + "learning_rate": 2.365828561758259e-06, + "loss": 1.2512, + "step": 1848 + }, + { + "epoch": 3.5492078732597214, + "grad_norm": 0.6341739182946521, + "learning_rate": 2.3601349966013417e-06, + "loss": 1.1896, + "step": 1849 + }, + { + "epoch": 3.5511281805088815, + "grad_norm": 0.5315953942302961, + "learning_rate": 2.354446173495032e-06, + "loss": 1.2611, + "step": 1850 + }, + { + "epoch": 3.5530484877580415, + "grad_norm": 0.5413692300791854, + "learning_rate": 2.348762102658291e-06, + "loss": 1.2119, + "step": 1851 + }, + { + "epoch": 3.554968795007201, + "grad_norm": 0.6239035744436765, + "learning_rate": 2.3430827943015494e-06, + "loss": 1.2557, + "step": 1852 + }, + { + "epoch": 3.556889102256361, + "grad_norm": 0.5254360716867675, + "learning_rate": 2.337408258626679e-06, + "loss": 1.2425, + "step": 1853 + }, + { + "epoch": 3.558809409505521, + "grad_norm": 0.588999453148548, + "learning_rate": 2.3317385058269776e-06, + "loss": 1.1768, + "step": 1854 + }, + { + "epoch": 3.5607297167546808, + "grad_norm": 0.5277711959251866, + "learning_rate": 2.326073546087156e-06, + "loss": 1.0849, + "step": 1855 + }, + { + "epoch": 3.5626500240038403, + 
"grad_norm": 0.5233906215832763, + "learning_rate": 2.320413389583313e-06, + "loss": 1.1775, + "step": 1856 + }, + { + "epoch": 3.5645703312530004, + "grad_norm": 0.5250558937301562, + "learning_rate": 2.314758046482918e-06, + "loss": 1.0292, + "step": 1857 + }, + { + "epoch": 3.5664906385021604, + "grad_norm": 0.5669772611900041, + "learning_rate": 2.309107526944792e-06, + "loss": 1.2192, + "step": 1858 + }, + { + "epoch": 3.56841094575132, + "grad_norm": 0.5160238123672787, + "learning_rate": 2.3034618411190984e-06, + "loss": 1.2529, + "step": 1859 + }, + { + "epoch": 3.57033125300048, + "grad_norm": 0.5887236184861576, + "learning_rate": 2.2978209991473087e-06, + "loss": 1.1194, + "step": 1860 + }, + { + "epoch": 3.57225156024964, + "grad_norm": 0.5249058997350508, + "learning_rate": 2.2921850111622028e-06, + "loss": 1.2746, + "step": 1861 + }, + { + "epoch": 3.5741718674987997, + "grad_norm": 0.5880060704638931, + "learning_rate": 2.2865538872878323e-06, + "loss": 1.2024, + "step": 1862 + }, + { + "epoch": 3.5760921747479597, + "grad_norm": 0.6357009739018459, + "learning_rate": 2.280927637639519e-06, + "loss": 1.2998, + "step": 1863 + }, + { + "epoch": 3.5780124819971197, + "grad_norm": 0.5797171090906333, + "learning_rate": 2.275306272323821e-06, + "loss": 1.3234, + "step": 1864 + }, + { + "epoch": 3.5799327892462793, + "grad_norm": 0.5840920564263393, + "learning_rate": 2.2696898014385316e-06, + "loss": 1.4129, + "step": 1865 + }, + { + "epoch": 3.5818530964954394, + "grad_norm": 0.618354375475666, + "learning_rate": 2.264078235072645e-06, + "loss": 1.2797, + "step": 1866 + }, + { + "epoch": 3.5837734037445994, + "grad_norm": 0.5188012774357862, + "learning_rate": 2.258471583306345e-06, + "loss": 1.1479, + "step": 1867 + }, + { + "epoch": 3.585693710993759, + "grad_norm": 0.569514349554406, + "learning_rate": 2.252869856210994e-06, + "loss": 1.2236, + "step": 1868 + }, + { + "epoch": 3.5876140182429186, + "grad_norm": 0.58793952212668, + "learning_rate": 2.2472730638491047e-06, + "loss": 1.2698, + "step": 1869 + }, + { + "epoch": 3.5895343254920786, + "grad_norm": 0.6188731233888042, + "learning_rate": 2.2416812162743223e-06, + "loss": 1.0881, + "step": 1870 + }, + { + "epoch": 3.5914546327412387, + "grad_norm": 0.5387211144427339, + "learning_rate": 2.2360943235314114e-06, + "loss": 1.0728, + "step": 1871 + }, + { + "epoch": 3.5933749399903983, + "grad_norm": 0.7911336478988346, + "learning_rate": 2.23051239565624e-06, + "loss": 1.1331, + "step": 1872 + }, + { + "epoch": 3.5952952472395583, + "grad_norm": 0.5508823785480784, + "learning_rate": 2.22493544267575e-06, + "loss": 1.3098, + "step": 1873 + }, + { + "epoch": 3.5972155544887183, + "grad_norm": 0.6150849283382401, + "learning_rate": 2.2193634746079547e-06, + "loss": 1.335, + "step": 1874 + }, + { + "epoch": 3.599135861737878, + "grad_norm": 0.5588796189757888, + "learning_rate": 2.2137965014619068e-06, + "loss": 1.1638, + "step": 1875 + }, + { + "epoch": 3.601056168987038, + "grad_norm": 0.5822473521872475, + "learning_rate": 2.208234533237692e-06, + "loss": 1.1653, + "step": 1876 + }, + { + "epoch": 3.602976476236198, + "grad_norm": 0.5151877487471704, + "learning_rate": 2.202677579926399e-06, + "loss": 1.1493, + "step": 1877 + }, + { + "epoch": 3.6048967834853576, + "grad_norm": 0.5329194834352995, + "learning_rate": 2.197125651510115e-06, + "loss": 1.0473, + "step": 1878 + }, + { + "epoch": 3.6068170907345176, + "grad_norm": 0.5410994196983396, + "learning_rate": 2.1915787579618964e-06, + "loss": 1.2677, + "step": 1879 + 
}, + { + "epoch": 3.6087373979836777, + "grad_norm": 0.5679006460816588, + "learning_rate": 2.1860369092457538e-06, + "loss": 1.1159, + "step": 1880 + }, + { + "epoch": 3.6106577052328372, + "grad_norm": 0.518401722785567, + "learning_rate": 2.180500115316641e-06, + "loss": 1.1462, + "step": 1881 + }, + { + "epoch": 3.6125780124819973, + "grad_norm": 0.600131070569733, + "learning_rate": 2.17496838612043e-06, + "loss": 1.2327, + "step": 1882 + }, + { + "epoch": 3.614498319731157, + "grad_norm": 0.5479155374654383, + "learning_rate": 2.169441731593893e-06, + "loss": 1.2784, + "step": 1883 + }, + { + "epoch": 3.616418626980317, + "grad_norm": 0.590334807249619, + "learning_rate": 2.163920161664685e-06, + "loss": 1.2462, + "step": 1884 + }, + { + "epoch": 3.6183389342294765, + "grad_norm": 0.5056747752215215, + "learning_rate": 2.1584036862513337e-06, + "loss": 1.1385, + "step": 1885 + }, + { + "epoch": 3.6202592414786365, + "grad_norm": 0.5801961024200594, + "learning_rate": 2.1528923152632082e-06, + "loss": 1.2236, + "step": 1886 + }, + { + "epoch": 3.6221795487277966, + "grad_norm": 0.5612348606195392, + "learning_rate": 2.1473860586005146e-06, + "loss": 1.2676, + "step": 1887 + }, + { + "epoch": 3.624099855976956, + "grad_norm": 0.6391802508918328, + "learning_rate": 2.1418849261542667e-06, + "loss": 1.3785, + "step": 1888 + }, + { + "epoch": 3.626020163226116, + "grad_norm": 0.6341411554094745, + "learning_rate": 2.136388927806279e-06, + "loss": 1.2504, + "step": 1889 + }, + { + "epoch": 3.6279404704752762, + "grad_norm": 0.5246951166416104, + "learning_rate": 2.130898073429137e-06, + "loss": 1.157, + "step": 1890 + }, + { + "epoch": 3.629860777724436, + "grad_norm": 0.5450793776376113, + "learning_rate": 2.1254123728861926e-06, + "loss": 1.1497, + "step": 1891 + }, + { + "epoch": 3.631781084973596, + "grad_norm": 0.5267615427989941, + "learning_rate": 2.1199318360315356e-06, + "loss": 1.3475, + "step": 1892 + }, + { + "epoch": 3.633701392222756, + "grad_norm": 0.6069842377367444, + "learning_rate": 2.114456472709979e-06, + "loss": 1.0705, + "step": 1893 + }, + { + "epoch": 3.6356216994719155, + "grad_norm": 0.6323472407523595, + "learning_rate": 2.1089862927570474e-06, + "loss": 1.2215, + "step": 1894 + }, + { + "epoch": 3.6375420067210755, + "grad_norm": 0.5603944281612804, + "learning_rate": 2.1035213059989525e-06, + "loss": 1.2621, + "step": 1895 + }, + { + "epoch": 3.639462313970235, + "grad_norm": 0.5838797247616405, + "learning_rate": 2.098061522252574e-06, + "loss": 1.304, + "step": 1896 + }, + { + "epoch": 3.641382621219395, + "grad_norm": 0.6247768551228082, + "learning_rate": 2.092606951325448e-06, + "loss": 1.3115, + "step": 1897 + }, + { + "epoch": 3.6433029284685547, + "grad_norm": 0.543680322103595, + "learning_rate": 2.087157603015748e-06, + "loss": 1.2724, + "step": 1898 + }, + { + "epoch": 3.645223235717715, + "grad_norm": 0.5887446439421347, + "learning_rate": 2.0817134871122626e-06, + "loss": 1.2651, + "step": 1899 + }, + { + "epoch": 3.647143542966875, + "grad_norm": 0.5202524908110011, + "learning_rate": 2.076274613394386e-06, + "loss": 1.1101, + "step": 1900 + }, + { + "epoch": 3.6490638502160344, + "grad_norm": 0.5517463248683387, + "learning_rate": 2.070840991632089e-06, + "loss": 1.2618, + "step": 1901 + }, + { + "epoch": 3.6509841574651944, + "grad_norm": 0.5077552856994134, + "learning_rate": 2.0654126315859163e-06, + "loss": 1.2563, + "step": 1902 + }, + { + "epoch": 3.6529044647143545, + "grad_norm": 0.5347751354155122, + "learning_rate": 
2.059989543006953e-06, + "loss": 1.201, + "step": 1903 + }, + { + "epoch": 3.654824771963514, + "grad_norm": 0.5765980847886398, + "learning_rate": 2.054571735636822e-06, + "loss": 1.0542, + "step": 1904 + }, + { + "epoch": 3.656745079212674, + "grad_norm": 0.5366339175229861, + "learning_rate": 2.049159219207655e-06, + "loss": 1.2894, + "step": 1905 + }, + { + "epoch": 3.658665386461834, + "grad_norm": 0.6042097233456489, + "learning_rate": 2.043752003442078e-06, + "loss": 1.1219, + "step": 1906 + }, + { + "epoch": 3.6605856937109937, + "grad_norm": 0.5768154137675807, + "learning_rate": 2.0383500980531995e-06, + "loss": 1.2325, + "step": 1907 + }, + { + "epoch": 3.6625060009601538, + "grad_norm": 0.5678349080966371, + "learning_rate": 2.03295351274459e-06, + "loss": 1.2742, + "step": 1908 + }, + { + "epoch": 3.6644263082093134, + "grad_norm": 0.6010003025028522, + "learning_rate": 2.027562257210257e-06, + "loss": 1.2463, + "step": 1909 + }, + { + "epoch": 3.6663466154584734, + "grad_norm": 0.5588849215074105, + "learning_rate": 2.022176341134638e-06, + "loss": 1.2729, + "step": 1910 + }, + { + "epoch": 3.668266922707633, + "grad_norm": 0.5543017873213459, + "learning_rate": 2.01679577419258e-06, + "loss": 1.2726, + "step": 1911 + }, + { + "epoch": 3.670187229956793, + "grad_norm": 0.5315076215174357, + "learning_rate": 2.011420566049319e-06, + "loss": 1.3451, + "step": 1912 + }, + { + "epoch": 3.672107537205953, + "grad_norm": 0.5363706999376442, + "learning_rate": 2.006050726360467e-06, + "loss": 1.2046, + "step": 1913 + }, + { + "epoch": 3.6740278444551127, + "grad_norm": 0.5862591053918536, + "learning_rate": 2.0006862647719887e-06, + "loss": 1.2972, + "step": 1914 + }, + { + "epoch": 3.6759481517042727, + "grad_norm": 0.5742883848469589, + "learning_rate": 1.9953271909201945e-06, + "loss": 1.2134, + "step": 1915 + }, + { + "epoch": 3.6778684589534327, + "grad_norm": 0.5553815154595928, + "learning_rate": 1.989973514431709e-06, + "loss": 1.3178, + "step": 1916 + }, + { + "epoch": 3.6797887662025923, + "grad_norm": 0.5667049762865936, + "learning_rate": 1.984625244923468e-06, + "loss": 1.062, + "step": 1917 + }, + { + "epoch": 3.6817090734517524, + "grad_norm": 0.5223711581824668, + "learning_rate": 1.979282392002691e-06, + "loss": 1.1632, + "step": 1918 + }, + { + "epoch": 3.6836293807009124, + "grad_norm": 0.5322591152244598, + "learning_rate": 1.9739449652668664e-06, + "loss": 1.1938, + "step": 1919 + }, + { + "epoch": 3.685549687950072, + "grad_norm": 0.5374248595182037, + "learning_rate": 1.9686129743037387e-06, + "loss": 1.0015, + "step": 1920 + }, + { + "epoch": 3.687469995199232, + "grad_norm": 0.5862270834728044, + "learning_rate": 1.963286428691289e-06, + "loss": 1.1781, + "step": 1921 + }, + { + "epoch": 3.6893903024483916, + "grad_norm": 0.6024670441784897, + "learning_rate": 1.957965337997712e-06, + "loss": 1.3475, + "step": 1922 + }, + { + "epoch": 3.6913106096975516, + "grad_norm": 0.5305135608448318, + "learning_rate": 1.9526497117814046e-06, + "loss": 1.1613, + "step": 1923 + }, + { + "epoch": 3.6932309169467112, + "grad_norm": 0.517343073664411, + "learning_rate": 1.9473395595909533e-06, + "loss": 1.211, + "step": 1924 + }, + { + "epoch": 3.6951512241958713, + "grad_norm": 0.5894101249232763, + "learning_rate": 1.9420348909651038e-06, + "loss": 1.2506, + "step": 1925 + }, + { + "epoch": 3.6970715314450313, + "grad_norm": 0.5091877521139326, + "learning_rate": 1.9367357154327577e-06, + "loss": 1.182, + "step": 1926 + }, + { + "epoch": 3.698991838694191, + "grad_norm": 
0.5970240314899021, + "learning_rate": 1.9314420425129438e-06, + "loss": 1.2058, + "step": 1927 + }, + { + "epoch": 3.700912145943351, + "grad_norm": 0.5349195154634789, + "learning_rate": 1.926153881714813e-06, + "loss": 1.243, + "step": 1928 + }, + { + "epoch": 3.702832453192511, + "grad_norm": 0.5784259002039114, + "learning_rate": 1.920871242537607e-06, + "loss": 1.2283, + "step": 1929 + }, + { + "epoch": 3.7047527604416706, + "grad_norm": 0.6095533596566215, + "learning_rate": 1.9155941344706547e-06, + "loss": 1.2565, + "step": 1930 + }, + { + "epoch": 3.7066730676908306, + "grad_norm": 0.5633441325508014, + "learning_rate": 1.910322566993351e-06, + "loss": 1.1374, + "step": 1931 + }, + { + "epoch": 3.7085933749399906, + "grad_norm": 0.6342349366217632, + "learning_rate": 1.9050565495751271e-06, + "loss": 1.2933, + "step": 1932 + }, + { + "epoch": 3.7105136821891502, + "grad_norm": 0.5315389788107561, + "learning_rate": 1.8997960916754566e-06, + "loss": 1.1748, + "step": 1933 + }, + { + "epoch": 3.7124339894383103, + "grad_norm": 0.5730113606117737, + "learning_rate": 1.8945412027438226e-06, + "loss": 1.3224, + "step": 1934 + }, + { + "epoch": 3.71435429668747, + "grad_norm": 0.5701985784279351, + "learning_rate": 1.8892918922197024e-06, + "loss": 1.2222, + "step": 1935 + }, + { + "epoch": 3.71627460393663, + "grad_norm": 0.49938934585159733, + "learning_rate": 1.8840481695325519e-06, + "loss": 1.1161, + "step": 1936 + }, + { + "epoch": 3.7181949111857895, + "grad_norm": 0.5735656964810506, + "learning_rate": 1.8788100441017953e-06, + "loss": 1.2411, + "step": 1937 + }, + { + "epoch": 3.7201152184349495, + "grad_norm": 0.5481304838508377, + "learning_rate": 1.873577525336795e-06, + "loss": 1.1437, + "step": 1938 + }, + { + "epoch": 3.7220355256841096, + "grad_norm": 0.574351976558758, + "learning_rate": 1.8683506226368487e-06, + "loss": 1.145, + "step": 1939 + }, + { + "epoch": 3.723955832933269, + "grad_norm": 0.5462589392877683, + "learning_rate": 1.8631293453911596e-06, + "loss": 1.276, + "step": 1940 + }, + { + "epoch": 3.725876140182429, + "grad_norm": 0.6110267740181545, + "learning_rate": 1.8579137029788319e-06, + "loss": 1.2301, + "step": 1941 + }, + { + "epoch": 3.727796447431589, + "grad_norm": 0.5841411024587125, + "learning_rate": 1.8527037047688422e-06, + "loss": 1.2214, + "step": 1942 + }, + { + "epoch": 3.729716754680749, + "grad_norm": 0.5118385492139843, + "learning_rate": 1.8474993601200319e-06, + "loss": 1.2187, + "step": 1943 + }, + { + "epoch": 3.731637061929909, + "grad_norm": 0.5676034247574478, + "learning_rate": 1.8423006783810893e-06, + "loss": 1.2057, + "step": 1944 + }, + { + "epoch": 3.733557369179069, + "grad_norm": 0.5528556081006811, + "learning_rate": 1.8371076688905205e-06, + "loss": 1.1309, + "step": 1945 + }, + { + "epoch": 3.7354776764282285, + "grad_norm": 0.5052410964461849, + "learning_rate": 1.8319203409766507e-06, + "loss": 1.3201, + "step": 1946 + }, + { + "epoch": 3.7373979836773885, + "grad_norm": 0.4868763855816056, + "learning_rate": 1.8267387039575995e-06, + "loss": 1.1394, + "step": 1947 + }, + { + "epoch": 3.739318290926548, + "grad_norm": 0.5563992023915842, + "learning_rate": 1.8215627671412605e-06, + "loss": 1.2169, + "step": 1948 + }, + { + "epoch": 3.741238598175708, + "grad_norm": 0.5869584267366091, + "learning_rate": 1.8163925398252858e-06, + "loss": 1.1003, + "step": 1949 + }, + { + "epoch": 3.7431589054248677, + "grad_norm": 0.5521869811812116, + "learning_rate": 1.811228031297077e-06, + "loss": 1.377, + "step": 1950 + }, + { 
+ "epoch": 3.7450792126740278, + "grad_norm": 0.5543033895587416, + "learning_rate": 1.8060692508337612e-06, + "loss": 1.0411, + "step": 1951 + }, + { + "epoch": 3.746999519923188, + "grad_norm": 0.5506350664709555, + "learning_rate": 1.800916207702173e-06, + "loss": 1.2547, + "step": 1952 + }, + { + "epoch": 3.7489198271723474, + "grad_norm": 0.5690001042492241, + "learning_rate": 1.795768911158845e-06, + "loss": 1.2849, + "step": 1953 + }, + { + "epoch": 3.7508401344215074, + "grad_norm": 0.531179546443799, + "learning_rate": 1.7906273704499844e-06, + "loss": 1.145, + "step": 1954 + }, + { + "epoch": 3.7527604416706675, + "grad_norm": 0.582549756683178, + "learning_rate": 1.7854915948114577e-06, + "loss": 1.2923, + "step": 1955 + }, + { + "epoch": 3.754680748919827, + "grad_norm": 0.5741957230295913, + "learning_rate": 1.7803615934687796e-06, + "loss": 1.3019, + "step": 1956 + }, + { + "epoch": 3.756601056168987, + "grad_norm": 0.5952708935332971, + "learning_rate": 1.7752373756370928e-06, + "loss": 1.2448, + "step": 1957 + }, + { + "epoch": 3.758521363418147, + "grad_norm": 0.45360289908973134, + "learning_rate": 1.7701189505211424e-06, + "loss": 1.1101, + "step": 1958 + }, + { + "epoch": 3.7604416706673067, + "grad_norm": 0.5770883329276033, + "learning_rate": 1.7650063273152773e-06, + "loss": 1.0866, + "step": 1959 + }, + { + "epoch": 3.7623619779164668, + "grad_norm": 0.6257745148687909, + "learning_rate": 1.759899515203422e-06, + "loss": 1.1597, + "step": 1960 + }, + { + "epoch": 3.7642822851656264, + "grad_norm": 0.5443528672594468, + "learning_rate": 1.754798523359058e-06, + "loss": 1.2081, + "step": 1961 + }, + { + "epoch": 3.7662025924147864, + "grad_norm": 0.5277313422816199, + "learning_rate": 1.7497033609452192e-06, + "loss": 1.2759, + "step": 1962 + }, + { + "epoch": 3.768122899663946, + "grad_norm": 0.5772524707407268, + "learning_rate": 1.74461403711446e-06, + "loss": 1.1122, + "step": 1963 + }, + { + "epoch": 3.770043206913106, + "grad_norm": 0.5566438958710497, + "learning_rate": 1.739530561008853e-06, + "loss": 1.0622, + "step": 1964 + }, + { + "epoch": 3.771963514162266, + "grad_norm": 0.5763392169098358, + "learning_rate": 1.7344529417599621e-06, + "loss": 1.158, + "step": 1965 + }, + { + "epoch": 3.7738838214114256, + "grad_norm": 0.5818702550156122, + "learning_rate": 1.7293811884888344e-06, + "loss": 1.0797, + "step": 1966 + }, + { + "epoch": 3.7758041286605857, + "grad_norm": 0.5578093486131688, + "learning_rate": 1.7243153103059767e-06, + "loss": 1.2711, + "step": 1967 + }, + { + "epoch": 3.7777244359097457, + "grad_norm": 0.5718313477792928, + "learning_rate": 1.719255316311342e-06, + "loss": 1.1592, + "step": 1968 + }, + { + "epoch": 3.7796447431589053, + "grad_norm": 0.5514243318976497, + "learning_rate": 1.7142012155943155e-06, + "loss": 1.4246, + "step": 1969 + }, + { + "epoch": 3.7815650504080653, + "grad_norm": 0.5640062880284308, + "learning_rate": 1.7091530172336968e-06, + "loss": 1.2891, + "step": 1970 + }, + { + "epoch": 3.7834853576572254, + "grad_norm": 0.5792014089053664, + "learning_rate": 1.7041107302976805e-06, + "loss": 1.1686, + "step": 1971 + }, + { + "epoch": 3.785405664906385, + "grad_norm": 0.5241362329116599, + "learning_rate": 1.6990743638438411e-06, + "loss": 1.3093, + "step": 1972 + }, + { + "epoch": 3.787325972155545, + "grad_norm": 0.571429657846086, + "learning_rate": 1.6940439269191238e-06, + "loss": 1.1688, + "step": 1973 + }, + { + "epoch": 3.7892462794047046, + "grad_norm": 0.568200626395358, + "learning_rate": 
1.689019428559816e-06, + "loss": 1.2065, + "step": 1974 + }, + { + "epoch": 3.7911665866538646, + "grad_norm": 0.6458180065748697, + "learning_rate": 1.684000877791543e-06, + "loss": 1.2273, + "step": 1975 + }, + { + "epoch": 3.7930868939030242, + "grad_norm": 0.5730077444301579, + "learning_rate": 1.6789882836292403e-06, + "loss": 1.2397, + "step": 1976 + }, + { + "epoch": 3.7950072011521843, + "grad_norm": 0.5306629719121915, + "learning_rate": 1.673981655077151e-06, + "loss": 1.1026, + "step": 1977 + }, + { + "epoch": 3.7969275084013443, + "grad_norm": 0.5517597068077622, + "learning_rate": 1.6689810011287933e-06, + "loss": 1.3548, + "step": 1978 + }, + { + "epoch": 3.798847815650504, + "grad_norm": 0.5652552576224069, + "learning_rate": 1.6639863307669623e-06, + "loss": 1.2286, + "step": 1979 + }, + { + "epoch": 3.800768122899664, + "grad_norm": 0.593770432593188, + "learning_rate": 1.6589976529636976e-06, + "loss": 1.4277, + "step": 1980 + }, + { + "epoch": 3.802688430148824, + "grad_norm": 0.6141765695690967, + "learning_rate": 1.654014976680275e-06, + "loss": 1.2296, + "step": 1981 + }, + { + "epoch": 3.8046087373979836, + "grad_norm": 0.7344311699548375, + "learning_rate": 1.6490383108671926e-06, + "loss": 1.2088, + "step": 1982 + }, + { + "epoch": 3.8065290446471436, + "grad_norm": 0.5307725652426962, + "learning_rate": 1.6440676644641523e-06, + "loss": 1.0974, + "step": 1983 + }, + { + "epoch": 3.8084493518963036, + "grad_norm": 0.6106785861731785, + "learning_rate": 1.63910304640004e-06, + "loss": 1.1703, + "step": 1984 + }, + { + "epoch": 3.810369659145463, + "grad_norm": 0.5065523031712291, + "learning_rate": 1.6341444655929118e-06, + "loss": 1.067, + "step": 1985 + }, + { + "epoch": 3.8122899663946233, + "grad_norm": 0.5290282281783937, + "learning_rate": 1.6291919309499849e-06, + "loss": 1.1931, + "step": 1986 + }, + { + "epoch": 3.814210273643783, + "grad_norm": 0.5756909462253864, + "learning_rate": 1.6242454513676081e-06, + "loss": 1.2061, + "step": 1987 + }, + { + "epoch": 3.816130580892943, + "grad_norm": 0.5344387052970526, + "learning_rate": 1.6193050357312612e-06, + "loss": 1.0493, + "step": 1988 + }, + { + "epoch": 3.8180508881421025, + "grad_norm": 0.5765401537963266, + "learning_rate": 1.6143706929155233e-06, + "loss": 1.2331, + "step": 1989 + }, + { + "epoch": 3.8199711953912625, + "grad_norm": 0.5581010853368356, + "learning_rate": 1.6094424317840724e-06, + "loss": 1.1258, + "step": 1990 + }, + { + "epoch": 3.8218915026404225, + "grad_norm": 0.5893772542329553, + "learning_rate": 1.6045202611896538e-06, + "loss": 1.2464, + "step": 1991 + }, + { + "epoch": 3.823811809889582, + "grad_norm": 0.586301214602685, + "learning_rate": 1.5996041899740804e-06, + "loss": 1.3658, + "step": 1992 + }, + { + "epoch": 3.825732117138742, + "grad_norm": 0.7514459420830459, + "learning_rate": 1.594694226968203e-06, + "loss": 1.3568, + "step": 1993 + }, + { + "epoch": 3.827652424387902, + "grad_norm": 0.5574884383018454, + "learning_rate": 1.5897903809919008e-06, + "loss": 1.1496, + "step": 1994 + }, + { + "epoch": 3.829572731637062, + "grad_norm": 0.534755159693442, + "learning_rate": 1.584892660854066e-06, + "loss": 1.264, + "step": 1995 + }, + { + "epoch": 3.831493038886222, + "grad_norm": 0.5652873582430399, + "learning_rate": 1.5800010753525896e-06, + "loss": 1.1998, + "step": 1996 + }, + { + "epoch": 3.833413346135382, + "grad_norm": 0.6459096737492138, + "learning_rate": 1.5751156332743373e-06, + "loss": 1.2829, + "step": 1997 + }, + { + "epoch": 3.8353336533845415, + 
"grad_norm": 0.51946906534724, + "learning_rate": 1.5702363433951407e-06, + "loss": 1.1864, + "step": 1998 + }, + { + "epoch": 3.8372539606337015, + "grad_norm": 0.6045159728899656, + "learning_rate": 1.5653632144797843e-06, + "loss": 1.1264, + "step": 1999 + }, + { + "epoch": 3.839174267882861, + "grad_norm": 0.5358722399052709, + "learning_rate": 1.5604962552819792e-06, + "loss": 1.1872, + "step": 2000 + }, + { + "epoch": 3.841094575132021, + "grad_norm": 0.5649879892237849, + "learning_rate": 1.5556354745443596e-06, + "loss": 1.3079, + "step": 2001 + }, + { + "epoch": 3.8430148823811807, + "grad_norm": 0.5610609768367208, + "learning_rate": 1.550780880998456e-06, + "loss": 1.3389, + "step": 2002 + }, + { + "epoch": 3.8449351896303408, + "grad_norm": 0.5618854245931849, + "learning_rate": 1.54593248336469e-06, + "loss": 1.1915, + "step": 2003 + }, + { + "epoch": 3.846855496879501, + "grad_norm": 0.5989158551654693, + "learning_rate": 1.5410902903523467e-06, + "loss": 1.2535, + "step": 2004 + }, + { + "epoch": 3.8487758041286604, + "grad_norm": 0.640158514860426, + "learning_rate": 1.5362543106595734e-06, + "loss": 1.2073, + "step": 2005 + }, + { + "epoch": 3.8506961113778204, + "grad_norm": 0.5291421982699344, + "learning_rate": 1.5314245529733507e-06, + "loss": 1.2207, + "step": 2006 + }, + { + "epoch": 3.8526164186269805, + "grad_norm": 0.547509352180386, + "learning_rate": 1.5266010259694825e-06, + "loss": 1.1039, + "step": 2007 + }, + { + "epoch": 3.85453672587614, + "grad_norm": 0.5697547412153795, + "learning_rate": 1.5217837383125828e-06, + "loss": 1.1737, + "step": 2008 + }, + { + "epoch": 3.8564570331253, + "grad_norm": 0.49660109889446585, + "learning_rate": 1.5169726986560595e-06, + "loss": 1.1918, + "step": 2009 + }, + { + "epoch": 3.85837734037446, + "grad_norm": 0.563093858490598, + "learning_rate": 1.5121679156420932e-06, + "loss": 1.1136, + "step": 2010 + }, + { + "epoch": 3.8602976476236197, + "grad_norm": 0.5318110302128307, + "learning_rate": 1.507369397901624e-06, + "loss": 1.1644, + "step": 2011 + }, + { + "epoch": 3.8622179548727797, + "grad_norm": 0.5782654488003195, + "learning_rate": 1.5025771540543443e-06, + "loss": 1.3386, + "step": 2012 + }, + { + "epoch": 3.86413826212194, + "grad_norm": 0.6064042115163315, + "learning_rate": 1.4977911927086692e-06, + "loss": 1.3782, + "step": 2013 + }, + { + "epoch": 3.8660585693710994, + "grad_norm": 0.5348480963637724, + "learning_rate": 1.4930115224617353e-06, + "loss": 1.1783, + "step": 2014 + }, + { + "epoch": 3.867978876620259, + "grad_norm": 0.5484480506754654, + "learning_rate": 1.4882381518993715e-06, + "loss": 1.0528, + "step": 2015 + }, + { + "epoch": 3.869899183869419, + "grad_norm": 0.531206762025555, + "learning_rate": 1.4834710895960968e-06, + "loss": 1.215, + "step": 2016 + }, + { + "epoch": 3.871819491118579, + "grad_norm": 0.5824525535683829, + "learning_rate": 1.4787103441150929e-06, + "loss": 1.1653, + "step": 2017 + }, + { + "epoch": 3.8737397983677386, + "grad_norm": 0.5278262294368417, + "learning_rate": 1.4739559240082001e-06, + "loss": 1.1414, + "step": 2018 + }, + { + "epoch": 3.8756601056168987, + "grad_norm": 0.6527349962795375, + "learning_rate": 1.4692078378158914e-06, + "loss": 1.2287, + "step": 2019 + }, + { + "epoch": 3.8775804128660587, + "grad_norm": 0.4799172739262922, + "learning_rate": 1.4644660940672628e-06, + "loss": 1.1643, + "step": 2020 + }, + { + "epoch": 3.8795007201152183, + "grad_norm": 0.5979288450459239, + "learning_rate": 1.4597307012800194e-06, + "loss": 1.2658, + "step": 
2021 + }, + { + "epoch": 3.8814210273643783, + "grad_norm": 0.5381191205773753, + "learning_rate": 1.455001667960459e-06, + "loss": 1.2496, + "step": 2022 + }, + { + "epoch": 3.8833413346135384, + "grad_norm": 0.598223518930072, + "learning_rate": 1.4502790026034514e-06, + "loss": 1.2642, + "step": 2023 + }, + { + "epoch": 3.885261641862698, + "grad_norm": 0.5806550077821261, + "learning_rate": 1.4455627136924282e-06, + "loss": 1.2985, + "step": 2024 + }, + { + "epoch": 3.887181949111858, + "grad_norm": 0.5572775547548025, + "learning_rate": 1.4408528096993723e-06, + "loss": 1.249, + "step": 2025 + }, + { + "epoch": 3.889102256361018, + "grad_norm": 0.4972045087923691, + "learning_rate": 1.4361492990847892e-06, + "loss": 1.1279, + "step": 2026 + }, + { + "epoch": 3.8910225636101776, + "grad_norm": 0.5635770791098951, + "learning_rate": 1.4314521902977075e-06, + "loss": 1.1964, + "step": 2027 + }, + { + "epoch": 3.8929428708593377, + "grad_norm": 0.5473864584659733, + "learning_rate": 1.4267614917756495e-06, + "loss": 1.2068, + "step": 2028 + }, + { + "epoch": 3.8948631781084972, + "grad_norm": 0.5098734401636195, + "learning_rate": 1.4220772119446274e-06, + "loss": 1.2296, + "step": 2029 + }, + { + "epoch": 3.8967834853576573, + "grad_norm": 0.5537084211809192, + "learning_rate": 1.4173993592191199e-06, + "loss": 1.2231, + "step": 2030 + }, + { + "epoch": 3.898703792606817, + "grad_norm": 0.6411986351728786, + "learning_rate": 1.4127279420020646e-06, + "loss": 1.2949, + "step": 2031 + }, + { + "epoch": 3.900624099855977, + "grad_norm": 0.50857267026722, + "learning_rate": 1.4080629686848347e-06, + "loss": 1.1403, + "step": 2032 + }, + { + "epoch": 3.902544407105137, + "grad_norm": 0.5727136702262682, + "learning_rate": 1.403404447647228e-06, + "loss": 1.119, + "step": 2033 + }, + { + "epoch": 3.9044647143542965, + "grad_norm": 0.5430498992104051, + "learning_rate": 1.398752387257456e-06, + "loss": 1.1406, + "step": 2034 + }, + { + "epoch": 3.9063850216034566, + "grad_norm": 0.5742842814419868, + "learning_rate": 1.3941067958721232e-06, + "loss": 1.1942, + "step": 2035 + }, + { + "epoch": 3.9083053288526166, + "grad_norm": 0.5752698207102926, + "learning_rate": 1.3894676818362112e-06, + "loss": 1.2382, + "step": 2036 + }, + { + "epoch": 3.910225636101776, + "grad_norm": 0.5990062767667091, + "learning_rate": 1.3848350534830678e-06, + "loss": 1.2951, + "step": 2037 + }, + { + "epoch": 3.9121459433509362, + "grad_norm": 0.5431185572500612, + "learning_rate": 1.380208919134392e-06, + "loss": 1.3486, + "step": 2038 + }, + { + "epoch": 3.9140662506000963, + "grad_norm": 0.6109719079035386, + "learning_rate": 1.3755892871002142e-06, + "loss": 1.2008, + "step": 2039 + }, + { + "epoch": 3.915986557849256, + "grad_norm": 0.5498834001741615, + "learning_rate": 1.3709761656788884e-06, + "loss": 1.2221, + "step": 2040 + }, + { + "epoch": 3.917906865098416, + "grad_norm": 0.5243755668707245, + "learning_rate": 1.3663695631570685e-06, + "loss": 1.1233, + "step": 2041 + }, + { + "epoch": 3.9198271723475755, + "grad_norm": 0.6211570354938094, + "learning_rate": 1.3617694878097048e-06, + "loss": 1.1292, + "step": 2042 + }, + { + "epoch": 3.9217474795967355, + "grad_norm": 0.5289293962291077, + "learning_rate": 1.3571759479000157e-06, + "loss": 1.2022, + "step": 2043 + }, + { + "epoch": 3.923667786845895, + "grad_norm": 0.5237302377911837, + "learning_rate": 1.3525889516794865e-06, + "loss": 1.1261, + "step": 2044 + }, + { + "epoch": 3.925588094095055, + "grad_norm": 0.5864125079769741, + "learning_rate": 
1.348008507387843e-06, + "loss": 1.1921, + "step": 2045 + }, + { + "epoch": 3.927508401344215, + "grad_norm": 0.5132372078334463, + "learning_rate": 1.3434346232530416e-06, + "loss": 1.2153, + "step": 2046 + }, + { + "epoch": 3.929428708593375, + "grad_norm": 0.5736687130196049, + "learning_rate": 1.3388673074912578e-06, + "loss": 1.1235, + "step": 2047 + }, + { + "epoch": 3.931349015842535, + "grad_norm": 0.5537299456275703, + "learning_rate": 1.334306568306869e-06, + "loss": 1.1459, + "step": 2048 + }, + { + "epoch": 3.933269323091695, + "grad_norm": 0.6396087899814517, + "learning_rate": 1.329752413892434e-06, + "loss": 1.0667, + "step": 2049 + }, + { + "epoch": 3.9351896303408544, + "grad_norm": 0.5162023888903375, + "learning_rate": 1.3252048524286843e-06, + "loss": 1.2627, + "step": 2050 + }, + { + "epoch": 3.9371099375900145, + "grad_norm": 0.5581422475497699, + "learning_rate": 1.3206638920845134e-06, + "loss": 1.185, + "step": 2051 + }, + { + "epoch": 3.9390302448391745, + "grad_norm": 0.6382977115111219, + "learning_rate": 1.31612954101695e-06, + "loss": 1.2529, + "step": 2052 + }, + { + "epoch": 3.940950552088334, + "grad_norm": 0.6978036292098505, + "learning_rate": 1.3116018073711573e-06, + "loss": 1.2685, + "step": 2053 + }, + { + "epoch": 3.942870859337494, + "grad_norm": 0.5286605456776363, + "learning_rate": 1.3070806992804047e-06, + "loss": 1.2334, + "step": 2054 + }, + { + "epoch": 3.9447911665866537, + "grad_norm": 0.5366867883451, + "learning_rate": 1.302566224866067e-06, + "loss": 1.1021, + "step": 2055 + }, + { + "epoch": 3.9467114738358138, + "grad_norm": 0.5388309407024658, + "learning_rate": 1.298058392237595e-06, + "loss": 1.067, + "step": 2056 + }, + { + "epoch": 3.9486317810849734, + "grad_norm": 0.519649218838344, + "learning_rate": 1.2935572094925159e-06, + "loss": 1.2204, + "step": 2057 + }, + { + "epoch": 3.9505520883341334, + "grad_norm": 0.4922876066919291, + "learning_rate": 1.2890626847164078e-06, + "loss": 1.0572, + "step": 2058 + }, + { + "epoch": 3.9524723955832934, + "grad_norm": 0.5748331626775529, + "learning_rate": 1.2845748259828873e-06, + "loss": 1.1073, + "step": 2059 + }, + { + "epoch": 3.954392702832453, + "grad_norm": 0.5621521219458596, + "learning_rate": 1.2800936413536008e-06, + "loss": 1.1677, + "step": 2060 + }, + { + "epoch": 3.956313010081613, + "grad_norm": 0.5357387494068064, + "learning_rate": 1.2756191388782058e-06, + "loss": 1.2472, + "step": 2061 + }, + { + "epoch": 3.958233317330773, + "grad_norm": 0.5878654973552593, + "learning_rate": 1.271151326594352e-06, + "loss": 1.1352, + "step": 2062 + }, + { + "epoch": 3.9601536245799327, + "grad_norm": 0.5054296588933348, + "learning_rate": 1.2666902125276737e-06, + "loss": 1.1019, + "step": 2063 + }, + { + "epoch": 3.9620739318290927, + "grad_norm": 0.5565431108905633, + "learning_rate": 1.262235804691776e-06, + "loss": 1.154, + "step": 2064 + }, + { + "epoch": 3.9639942390782528, + "grad_norm": 0.6022839501107193, + "learning_rate": 1.2577881110882117e-06, + "loss": 1.2353, + "step": 2065 + }, + { + "epoch": 3.9659145463274124, + "grad_norm": 0.56120091860061, + "learning_rate": 1.2533471397064783e-06, + "loss": 1.2709, + "step": 2066 + }, + { + "epoch": 3.9678348535765724, + "grad_norm": 0.5343669306204547, + "learning_rate": 1.2489128985239934e-06, + "loss": 1.1549, + "step": 2067 + }, + { + "epoch": 3.969755160825732, + "grad_norm": 0.5283700549683216, + "learning_rate": 1.2444853955060899e-06, + "loss": 1.1503, + "step": 2068 + }, + { + "epoch": 3.971675468074892, + 
"grad_norm": 0.5496494205326452, + "learning_rate": 1.24006463860599e-06, + "loss": 1.2555, + "step": 2069 + }, + { + "epoch": 3.9735957753240516, + "grad_norm": 0.5474360076766185, + "learning_rate": 1.2356506357648058e-06, + "loss": 1.2204, + "step": 2070 + }, + { + "epoch": 3.9755160825732117, + "grad_norm": 0.5428330647570867, + "learning_rate": 1.2312433949115104e-06, + "loss": 1.3571, + "step": 2071 + }, + { + "epoch": 3.9774363898223717, + "grad_norm": 0.5256410138713948, + "learning_rate": 1.2268429239629314e-06, + "loss": 1.1495, + "step": 2072 + }, + { + "epoch": 3.9793566970715313, + "grad_norm": 0.5289728001792591, + "learning_rate": 1.2224492308237385e-06, + "loss": 1.1433, + "step": 2073 + }, + { + "epoch": 3.9812770043206913, + "grad_norm": 0.5485202282175305, + "learning_rate": 1.2180623233864254e-06, + "loss": 1.1125, + "step": 2074 + }, + { + "epoch": 3.9831973115698514, + "grad_norm": 0.5452802737853608, + "learning_rate": 1.2136822095312934e-06, + "loss": 1.3543, + "step": 2075 + }, + { + "epoch": 3.985117618819011, + "grad_norm": 0.6038504809675889, + "learning_rate": 1.209308897126442e-06, + "loss": 1.2433, + "step": 2076 + }, + { + "epoch": 3.987037926068171, + "grad_norm": 0.5873901859081604, + "learning_rate": 1.2049423940277566e-06, + "loss": 1.1735, + "step": 2077 + }, + { + "epoch": 3.988958233317331, + "grad_norm": 0.5689176172933589, + "learning_rate": 1.2005827080788835e-06, + "loss": 1.2113, + "step": 2078 + }, + { + "epoch": 3.9908785405664906, + "grad_norm": 0.5319031899173643, + "learning_rate": 1.1962298471112316e-06, + "loss": 1.2786, + "step": 2079 + }, + { + "epoch": 3.9927988478156506, + "grad_norm": 0.5404983261788944, + "learning_rate": 1.1918838189439426e-06, + "loss": 1.3087, + "step": 2080 + }, + { + "epoch": 3.9947191550648102, + "grad_norm": 0.5261232551570874, + "learning_rate": 1.1875446313838896e-06, + "loss": 1.0908, + "step": 2081 + }, + { + "epoch": 3.9966394623139703, + "grad_norm": 0.5418706567849054, + "learning_rate": 1.1832122922256539e-06, + "loss": 1.0845, + "step": 2082 + }, + { + "epoch": 3.99855976956313, + "grad_norm": 0.5305513436253771, + "learning_rate": 1.1788868092515177e-06, + "loss": 1.2085, + "step": 2083 + }, + { + "epoch": 4.0, + "grad_norm": 0.766849236423649, + "learning_rate": 1.1745681902314481e-06, + "loss": 1.2163, + "step": 2084 + }, + { + "epoch": 4.00192030724916, + "grad_norm": 0.5548042214865834, + "learning_rate": 1.1702564429230762e-06, + "loss": 1.3826, + "step": 2085 + }, + { + "epoch": 4.00384061449832, + "grad_norm": 0.5626463848803772, + "learning_rate": 1.1659515750716953e-06, + "loss": 1.1131, + "step": 2086 + }, + { + "epoch": 4.00576092174748, + "grad_norm": 0.5675139173821863, + "learning_rate": 1.1616535944102414e-06, + "loss": 1.2145, + "step": 2087 + }, + { + "epoch": 4.007681228996639, + "grad_norm": 0.5723186581636632, + "learning_rate": 1.1573625086592744e-06, + "loss": 1.23, + "step": 2088 + }, + { + "epoch": 4.0096015362458, + "grad_norm": 0.559627260467411, + "learning_rate": 1.1530783255269695e-06, + "loss": 1.3913, + "step": 2089 + }, + { + "epoch": 4.011521843494959, + "grad_norm": 0.5778044774113282, + "learning_rate": 1.1488010527091075e-06, + "loss": 1.2374, + "step": 2090 + }, + { + "epoch": 4.013442150744119, + "grad_norm": 0.5645299779463526, + "learning_rate": 1.1445306978890497e-06, + "loss": 1.1326, + "step": 2091 + }, + { + "epoch": 4.0153624579932785, + "grad_norm": 0.5830410705425324, + "learning_rate": 1.1402672687377341e-06, + "loss": 1.2434, + "step": 2092 + }, + { + 
"epoch": 4.017282765242439, + "grad_norm": 0.585032799962917, + "learning_rate": 1.1360107729136588e-06, + "loss": 1.2532, + "step": 2093 + }, + { + "epoch": 4.019203072491599, + "grad_norm": 0.5508477645536923, + "learning_rate": 1.1317612180628645e-06, + "loss": 1.1834, + "step": 2094 + }, + { + "epoch": 4.021123379740758, + "grad_norm": 0.5102291404223083, + "learning_rate": 1.127518611818924e-06, + "loss": 1.1747, + "step": 2095 + }, + { + "epoch": 4.023043686989919, + "grad_norm": 0.5438794224760153, + "learning_rate": 1.1232829618029295e-06, + "loss": 1.2984, + "step": 2096 + }, + { + "epoch": 4.024963994239078, + "grad_norm": 0.5079065657583147, + "learning_rate": 1.1190542756234807e-06, + "loss": 1.1486, + "step": 2097 + }, + { + "epoch": 4.026884301488238, + "grad_norm": 0.5211658285542289, + "learning_rate": 1.1148325608766586e-06, + "loss": 1.2748, + "step": 2098 + }, + { + "epoch": 4.028804608737398, + "grad_norm": 0.6048473167997156, + "learning_rate": 1.1106178251460297e-06, + "loss": 1.2832, + "step": 2099 + }, + { + "epoch": 4.030724915986558, + "grad_norm": 0.543668054723494, + "learning_rate": 1.106410076002623e-06, + "loss": 1.2654, + "step": 2100 + }, + { + "epoch": 4.0326452232357175, + "grad_norm": 0.5844882534560377, + "learning_rate": 1.1022093210049135e-06, + "loss": 1.4156, + "step": 2101 + }, + { + "epoch": 4.034565530484878, + "grad_norm": 0.5244674562596424, + "learning_rate": 1.0980155676988159e-06, + "loss": 1.1169, + "step": 2102 + }, + { + "epoch": 4.036485837734038, + "grad_norm": 0.5247260988529615, + "learning_rate": 1.0938288236176648e-06, + "loss": 1.2026, + "step": 2103 + }, + { + "epoch": 4.038406144983197, + "grad_norm": 0.5178265331477909, + "learning_rate": 1.0896490962822082e-06, + "loss": 1.1544, + "step": 2104 + }, + { + "epoch": 4.040326452232357, + "grad_norm": 0.6054984839404333, + "learning_rate": 1.0854763932005836e-06, + "loss": 1.1505, + "step": 2105 + }, + { + "epoch": 4.042246759481517, + "grad_norm": 0.6232407787161932, + "learning_rate": 1.0813107218683171e-06, + "loss": 1.2573, + "step": 2106 + }, + { + "epoch": 4.044167066730677, + "grad_norm": 0.5737104325287904, + "learning_rate": 1.077152089768299e-06, + "loss": 1.3554, + "step": 2107 + }, + { + "epoch": 4.046087373979836, + "grad_norm": 0.509356037515022, + "learning_rate": 1.0730005043707765e-06, + "loss": 1.1599, + "step": 2108 + }, + { + "epoch": 4.048007681228997, + "grad_norm": 0.5513124394173246, + "learning_rate": 1.0688559731333375e-06, + "loss": 1.2687, + "step": 2109 + }, + { + "epoch": 4.0499279884781565, + "grad_norm": 0.5323347378209177, + "learning_rate": 1.064718503500904e-06, + "loss": 1.1271, + "step": 2110 + }, + { + "epoch": 4.051848295727316, + "grad_norm": 0.5454433853412913, + "learning_rate": 1.0605881029057024e-06, + "loss": 1.1456, + "step": 2111 + }, + { + "epoch": 4.053768602976477, + "grad_norm": 0.5722576829877537, + "learning_rate": 1.0564647787672694e-06, + "loss": 1.3032, + "step": 2112 + }, + { + "epoch": 4.055688910225636, + "grad_norm": 0.5180903082236386, + "learning_rate": 1.0523485384924293e-06, + "loss": 1.1314, + "step": 2113 + }, + { + "epoch": 4.057609217474796, + "grad_norm": 0.5908432231252306, + "learning_rate": 1.0482393894752764e-06, + "loss": 1.2573, + "step": 2114 + }, + { + "epoch": 4.059529524723956, + "grad_norm": 0.5959538730262208, + "learning_rate": 1.0441373390971736e-06, + "loss": 1.0947, + "step": 2115 + }, + { + "epoch": 4.061449831973116, + "grad_norm": 0.5460311560029719, + "learning_rate": 1.0400423947267264e-06, + 
"loss": 1.1514, + "step": 2116 + }, + { + "epoch": 4.063370139222275, + "grad_norm": 0.5022025194212012, + "learning_rate": 1.03595456371978e-06, + "loss": 1.1893, + "step": 2117 + }, + { + "epoch": 4.065290446471435, + "grad_norm": 0.5267320534907937, + "learning_rate": 1.031873853419398e-06, + "loss": 1.1004, + "step": 2118 + }, + { + "epoch": 4.0672107537205955, + "grad_norm": 0.5640051650456994, + "learning_rate": 1.0278002711558566e-06, + "loss": 1.1812, + "step": 2119 + }, + { + "epoch": 4.069131060969755, + "grad_norm": 0.5685521273711498, + "learning_rate": 1.0237338242466254e-06, + "loss": 1.1922, + "step": 2120 + }, + { + "epoch": 4.071051368218915, + "grad_norm": 0.5735634612643715, + "learning_rate": 1.0196745199963553e-06, + "loss": 1.161, + "step": 2121 + }, + { + "epoch": 4.072971675468075, + "grad_norm": 0.5673466833054086, + "learning_rate": 1.0156223656968695e-06, + "loss": 1.2359, + "step": 2122 + }, + { + "epoch": 4.074891982717235, + "grad_norm": 0.5989360098994925, + "learning_rate": 1.0115773686271485e-06, + "loss": 1.2723, + "step": 2123 + }, + { + "epoch": 4.076812289966394, + "grad_norm": 0.5874827946508973, + "learning_rate": 1.007539536053313e-06, + "loss": 1.1744, + "step": 2124 + }, + { + "epoch": 4.078732597215555, + "grad_norm": 0.5141224290797225, + "learning_rate": 1.0035088752286133e-06, + "loss": 1.1343, + "step": 2125 + }, + { + "epoch": 4.080652904464714, + "grad_norm": 0.5411066045128655, + "learning_rate": 9.994853933934212e-07, + "loss": 1.319, + "step": 2126 + }, + { + "epoch": 4.082573211713874, + "grad_norm": 0.5079636700227997, + "learning_rate": 9.95469097775208e-07, + "loss": 1.2185, + "step": 2127 + }, + { + "epoch": 4.0844935189630345, + "grad_norm": 0.6100033251191618, + "learning_rate": 9.914599955885407e-07, + "loss": 1.1767, + "step": 2128 + }, + { + "epoch": 4.086413826212194, + "grad_norm": 0.509425849428369, + "learning_rate": 9.874580940350603e-07, + "loss": 1.2417, + "step": 2129 + }, + { + "epoch": 4.088334133461354, + "grad_norm": 0.4916359697046092, + "learning_rate": 9.834634003034777e-07, + "loss": 1.2542, + "step": 2130 + }, + { + "epoch": 4.090254440710513, + "grad_norm": 0.5257100418200412, + "learning_rate": 9.79475921569551e-07, + "loss": 1.2765, + "step": 2131 + }, + { + "epoch": 4.092174747959674, + "grad_norm": 0.5743793764104801, + "learning_rate": 9.754956649960823e-07, + "loss": 1.2569, + "step": 2132 + }, + { + "epoch": 4.094095055208833, + "grad_norm": 0.5346036908601735, + "learning_rate": 9.71522637732899e-07, + "loss": 1.2283, + "step": 2133 + }, + { + "epoch": 4.096015362457993, + "grad_norm": 0.6004976679118316, + "learning_rate": 9.675568469168388e-07, + "loss": 1.3012, + "step": 2134 + }, + { + "epoch": 4.097935669707153, + "grad_norm": 0.5696041623446196, + "learning_rate": 9.635982996717463e-07, + "loss": 1.1894, + "step": 2135 + }, + { + "epoch": 4.099855976956313, + "grad_norm": 0.5660879014128902, + "learning_rate": 9.59647003108452e-07, + "loss": 1.2535, + "step": 2136 + }, + { + "epoch": 4.101776284205473, + "grad_norm": 0.6127125968557959, + "learning_rate": 9.557029643247607e-07, + "loss": 1.2438, + "step": 2137 + }, + { + "epoch": 4.103696591454633, + "grad_norm": 0.5266990191971826, + "learning_rate": 9.517661904054387e-07, + "loss": 1.1389, + "step": 2138 + }, + { + "epoch": 4.105616898703793, + "grad_norm": 0.5708530769479147, + "learning_rate": 9.478366884222068e-07, + "loss": 1.2989, + "step": 2139 + }, + { + "epoch": 4.107537205952952, + "grad_norm": 0.6175219685416801, + "learning_rate": 
9.439144654337179e-07, + "loss": 1.3033, + "step": 2140 + }, + { + "epoch": 4.109457513202113, + "grad_norm": 0.5260139983994795, + "learning_rate": 9.39999528485554e-07, + "loss": 1.2472, + "step": 2141 + }, + { + "epoch": 4.111377820451272, + "grad_norm": 0.5690743942444327, + "learning_rate": 9.360918846102057e-07, + "loss": 1.269, + "step": 2142 + }, + { + "epoch": 4.113298127700432, + "grad_norm": 0.5065031715834668, + "learning_rate": 9.321915408270654e-07, + "loss": 1.167, + "step": 2143 + }, + { + "epoch": 4.1152184349495915, + "grad_norm": 0.5144262454570739, + "learning_rate": 9.282985041424086e-07, + "loss": 1.0917, + "step": 2144 + }, + { + "epoch": 4.117138742198752, + "grad_norm": 0.5793044183501742, + "learning_rate": 9.244127815493903e-07, + "loss": 1.1721, + "step": 2145 + }, + { + "epoch": 4.119059049447912, + "grad_norm": 0.5378251140768435, + "learning_rate": 9.20534380028022e-07, + "loss": 1.2301, + "step": 2146 + }, + { + "epoch": 4.120979356697071, + "grad_norm": 0.5318061066109855, + "learning_rate": 9.166633065451658e-07, + "loss": 1.2477, + "step": 2147 + }, + { + "epoch": 4.122899663946232, + "grad_norm": 0.5094826712087792, + "learning_rate": 9.127995680545204e-07, + "loss": 1.0912, + "step": 2148 + }, + { + "epoch": 4.124819971195391, + "grad_norm": 0.5074930625856107, + "learning_rate": 9.089431714966113e-07, + "loss": 1.2343, + "step": 2149 + }, + { + "epoch": 4.126740278444551, + "grad_norm": 0.5108656774470458, + "learning_rate": 9.050941237987709e-07, + "loss": 1.2341, + "step": 2150 + }, + { + "epoch": 4.128660585693711, + "grad_norm": 0.5148522123450382, + "learning_rate": 9.01252431875132e-07, + "loss": 1.1856, + "step": 2151 + }, + { + "epoch": 4.130580892942871, + "grad_norm": 0.5289470413037903, + "learning_rate": 8.974181026266165e-07, + "loss": 1.1956, + "step": 2152 + }, + { + "epoch": 4.1325012001920305, + "grad_norm": 0.5221921816612622, + "learning_rate": 8.935911429409167e-07, + "loss": 1.3003, + "step": 2153 + }, + { + "epoch": 4.134421507441191, + "grad_norm": 0.48889820129372236, + "learning_rate": 8.89771559692491e-07, + "loss": 1.1529, + "step": 2154 + }, + { + "epoch": 4.136341814690351, + "grad_norm": 0.6116738020186058, + "learning_rate": 8.859593597425431e-07, + "loss": 1.3111, + "step": 2155 + }, + { + "epoch": 4.13826212193951, + "grad_norm": 0.5766513502753049, + "learning_rate": 8.821545499390183e-07, + "loss": 1.2618, + "step": 2156 + }, + { + "epoch": 4.14018242918867, + "grad_norm": 0.5370940521762076, + "learning_rate": 8.783571371165822e-07, + "loss": 1.1673, + "step": 2157 + }, + { + "epoch": 4.14210273643783, + "grad_norm": 0.5220574627655491, + "learning_rate": 8.745671280966178e-07, + "loss": 1.1518, + "step": 2158 + }, + { + "epoch": 4.14402304368699, + "grad_norm": 0.5077879564857161, + "learning_rate": 8.707845296872047e-07, + "loss": 1.1825, + "step": 2159 + }, + { + "epoch": 4.145943350936149, + "grad_norm": 0.5791514081232271, + "learning_rate": 8.670093486831105e-07, + "loss": 1.3198, + "step": 2160 + }, + { + "epoch": 4.14786365818531, + "grad_norm": 0.5380988022440616, + "learning_rate": 8.632415918657822e-07, + "loss": 1.0622, + "step": 2161 + }, + { + "epoch": 4.1497839654344695, + "grad_norm": 0.5531209068897226, + "learning_rate": 8.594812660033286e-07, + "loss": 1.1591, + "step": 2162 + }, + { + "epoch": 4.151704272683629, + "grad_norm": 0.49183093810325107, + "learning_rate": 8.557283778505098e-07, + "loss": 1.1483, + "step": 2163 + }, + { + "epoch": 4.1536245799327896, + "grad_norm": 0.6066798154037578, + 
"learning_rate": 8.51982934148724e-07, + "loss": 1.3077, + "step": 2164 + }, + { + "epoch": 4.155544887181949, + "grad_norm": 0.5517638342967128, + "learning_rate": 8.482449416260013e-07, + "loss": 1.2497, + "step": 2165 + }, + { + "epoch": 4.157465194431109, + "grad_norm": 0.5180995710841257, + "learning_rate": 8.445144069969813e-07, + "loss": 1.0429, + "step": 2166 + }, + { + "epoch": 4.159385501680269, + "grad_norm": 0.4967973065246972, + "learning_rate": 8.407913369629129e-07, + "loss": 1.1665, + "step": 2167 + }, + { + "epoch": 4.161305808929429, + "grad_norm": 0.5220312960542267, + "learning_rate": 8.37075738211629e-07, + "loss": 1.0776, + "step": 2168 + }, + { + "epoch": 4.163226116178588, + "grad_norm": 0.5264911014100475, + "learning_rate": 8.333676174175498e-07, + "loss": 1.242, + "step": 2169 + }, + { + "epoch": 4.165146423427748, + "grad_norm": 0.5266378834904261, + "learning_rate": 8.296669812416546e-07, + "loss": 1.1499, + "step": 2170 + }, + { + "epoch": 4.1670667306769085, + "grad_norm": 0.5125193782511146, + "learning_rate": 8.25973836331485e-07, + "loss": 1.1976, + "step": 2171 + }, + { + "epoch": 4.168987037926068, + "grad_norm": 0.5413532620151125, + "learning_rate": 8.222881893211221e-07, + "loss": 1.1738, + "step": 2172 + }, + { + "epoch": 4.170907345175228, + "grad_norm": 0.6052542060538894, + "learning_rate": 8.186100468311764e-07, + "loss": 1.2848, + "step": 2173 + }, + { + "epoch": 4.172827652424388, + "grad_norm": 0.534142536441139, + "learning_rate": 8.149394154687823e-07, + "loss": 1.2285, + "step": 2174 + }, + { + "epoch": 4.174747959673548, + "grad_norm": 0.5645812770550667, + "learning_rate": 8.11276301827581e-07, + "loss": 1.1789, + "step": 2175 + }, + { + "epoch": 4.176668266922707, + "grad_norm": 0.5137909122692267, + "learning_rate": 8.076207124877067e-07, + "loss": 1.1103, + "step": 2176 + }, + { + "epoch": 4.178588574171868, + "grad_norm": 0.6562549627659681, + "learning_rate": 8.039726540157788e-07, + "loss": 1.1595, + "step": 2177 + }, + { + "epoch": 4.180508881421027, + "grad_norm": 0.6036063486008777, + "learning_rate": 8.003321329648911e-07, + "loss": 1.1992, + "step": 2178 + }, + { + "epoch": 4.182429188670187, + "grad_norm": 0.5867954615596982, + "learning_rate": 7.966991558745934e-07, + "loss": 1.1678, + "step": 2179 + }, + { + "epoch": 4.1843494959193475, + "grad_norm": 0.548930948135468, + "learning_rate": 7.930737292708889e-07, + "loss": 1.2678, + "step": 2180 + }, + { + "epoch": 4.186269803168507, + "grad_norm": 0.5307324615693798, + "learning_rate": 7.894558596662128e-07, + "loss": 1.1983, + "step": 2181 + }, + { + "epoch": 4.188190110417667, + "grad_norm": 0.5196222559185809, + "learning_rate": 7.858455535594306e-07, + "loss": 1.1042, + "step": 2182 + }, + { + "epoch": 4.190110417666826, + "grad_norm": 0.563622500740079, + "learning_rate": 7.822428174358166e-07, + "loss": 1.2545, + "step": 2183 + }, + { + "epoch": 4.192030724915987, + "grad_norm": 0.5273198786965761, + "learning_rate": 7.786476577670509e-07, + "loss": 1.1169, + "step": 2184 + }, + { + "epoch": 4.193951032165146, + "grad_norm": 0.5740310160065366, + "learning_rate": 7.750600810112013e-07, + "loss": 1.3098, + "step": 2185 + }, + { + "epoch": 4.195871339414306, + "grad_norm": 0.5300363778982393, + "learning_rate": 7.714800936127137e-07, + "loss": 1.1398, + "step": 2186 + }, + { + "epoch": 4.197791646663466, + "grad_norm": 0.5153568606775265, + "learning_rate": 7.679077020024034e-07, + "loss": 1.0479, + "step": 2187 + }, + { + "epoch": 4.199711953912626, + "grad_norm": 
0.5064438114495996, + "learning_rate": 7.643429125974411e-07, + "loss": 1.053, + "step": 2188 + }, + { + "epoch": 4.201632261161786, + "grad_norm": 0.543475787703253, + "learning_rate": 7.607857318013395e-07, + "loss": 1.2769, + "step": 2189 + }, + { + "epoch": 4.203552568410946, + "grad_norm": 0.5459633840938524, + "learning_rate": 7.572361660039434e-07, + "loss": 1.2206, + "step": 2190 + }, + { + "epoch": 4.205472875660106, + "grad_norm": 0.5832958927589873, + "learning_rate": 7.53694221581423e-07, + "loss": 1.2239, + "step": 2191 + }, + { + "epoch": 4.207393182909265, + "grad_norm": 0.5757085716950973, + "learning_rate": 7.501599048962527e-07, + "loss": 1.3014, + "step": 2192 + }, + { + "epoch": 4.209313490158426, + "grad_norm": 0.6097763427172184, + "learning_rate": 7.466332222972084e-07, + "loss": 1.1882, + "step": 2193 + }, + { + "epoch": 4.211233797407585, + "grad_norm": 0.5151694942898289, + "learning_rate": 7.431141801193509e-07, + "loss": 1.0796, + "step": 2194 + }, + { + "epoch": 4.213154104656745, + "grad_norm": 0.5584993426145137, + "learning_rate": 7.396027846840187e-07, + "loss": 1.1202, + "step": 2195 + }, + { + "epoch": 4.215074411905905, + "grad_norm": 0.5280339873972688, + "learning_rate": 7.360990422988101e-07, + "loss": 1.1195, + "step": 2196 + }, + { + "epoch": 4.216994719155065, + "grad_norm": 0.5363682389437734, + "learning_rate": 7.326029592575818e-07, + "loss": 1.2396, + "step": 2197 + }, + { + "epoch": 4.2189150264042246, + "grad_norm": 0.5298082695548109, + "learning_rate": 7.291145418404272e-07, + "loss": 1.3707, + "step": 2198 + }, + { + "epoch": 4.220835333653384, + "grad_norm": 0.5371223470465505, + "learning_rate": 7.256337963136695e-07, + "loss": 0.9652, + "step": 2199 + }, + { + "epoch": 4.222755640902545, + "grad_norm": 0.5443508992282282, + "learning_rate": 7.221607289298538e-07, + "loss": 1.2117, + "step": 2200 + }, + { + "epoch": 4.224675948151704, + "grad_norm": 0.5721439494390302, + "learning_rate": 7.186953459277335e-07, + "loss": 1.2383, + "step": 2201 + }, + { + "epoch": 4.226596255400864, + "grad_norm": 0.630529057268932, + "learning_rate": 7.152376535322542e-07, + "loss": 1.2418, + "step": 2202 + }, + { + "epoch": 4.228516562650024, + "grad_norm": 0.5088536490251296, + "learning_rate": 7.117876579545479e-07, + "loss": 1.1693, + "step": 2203 + }, + { + "epoch": 4.230436869899184, + "grad_norm": 0.593574907319419, + "learning_rate": 7.083453653919237e-07, + "loss": 1.3417, + "step": 2204 + }, + { + "epoch": 4.2323571771483435, + "grad_norm": 0.5033206591873223, + "learning_rate": 7.049107820278489e-07, + "loss": 1.2132, + "step": 2205 + }, + { + "epoch": 4.234277484397504, + "grad_norm": 0.5773746153343428, + "learning_rate": 7.014839140319485e-07, + "loss": 1.26, + "step": 2206 + }, + { + "epoch": 4.2361977916466635, + "grad_norm": 0.5685665347844662, + "learning_rate": 6.980647675599811e-07, + "loss": 1.3106, + "step": 2207 + }, + { + "epoch": 4.238118098895823, + "grad_norm": 0.5258178718973746, + "learning_rate": 6.946533487538415e-07, + "loss": 1.226, + "step": 2208 + }, + { + "epoch": 4.240038406144984, + "grad_norm": 0.5750693209533568, + "learning_rate": 6.912496637415384e-07, + "loss": 1.1923, + "step": 2209 + }, + { + "epoch": 4.241958713394143, + "grad_norm": 0.5515416530676441, + "learning_rate": 6.878537186371914e-07, + "loss": 1.3254, + "step": 2210 + }, + { + "epoch": 4.243879020643303, + "grad_norm": 0.5503743728093845, + "learning_rate": 6.844655195410149e-07, + "loss": 1.1813, + "step": 2211 + }, + { + "epoch": 
4.245799327892462, + "grad_norm": 0.5627264118872891, + "learning_rate": 6.810850725393081e-07, + "loss": 1.2767, + "step": 2212 + }, + { + "epoch": 4.247719635141623, + "grad_norm": 0.5323000340626918, + "learning_rate": 6.777123837044469e-07, + "loss": 1.3399, + "step": 2213 + }, + { + "epoch": 4.2496399423907825, + "grad_norm": 0.5224301626419067, + "learning_rate": 6.743474590948718e-07, + "loss": 1.1627, + "step": 2214 + }, + { + "epoch": 4.251560249639942, + "grad_norm": 0.5164393518628299, + "learning_rate": 6.709903047550736e-07, + "loss": 1.2506, + "step": 2215 + }, + { + "epoch": 4.2534805568891025, + "grad_norm": 0.5116170882656189, + "learning_rate": 6.676409267155847e-07, + "loss": 1.2367, + "step": 2216 + }, + { + "epoch": 4.255400864138262, + "grad_norm": 0.5538315679501662, + "learning_rate": 6.642993309929735e-07, + "loss": 1.2173, + "step": 2217 + }, + { + "epoch": 4.257321171387422, + "grad_norm": 0.5536619080466649, + "learning_rate": 6.609655235898227e-07, + "loss": 1.1313, + "step": 2218 + }, + { + "epoch": 4.259241478636582, + "grad_norm": 0.5068429068468964, + "learning_rate": 6.576395104947308e-07, + "loss": 1.1928, + "step": 2219 + }, + { + "epoch": 4.261161785885742, + "grad_norm": 0.6286574970251667, + "learning_rate": 6.543212976822894e-07, + "loss": 1.176, + "step": 2220 + }, + { + "epoch": 4.263082093134901, + "grad_norm": 0.5322666263785315, + "learning_rate": 6.510108911130825e-07, + "loss": 1.2838, + "step": 2221 + }, + { + "epoch": 4.265002400384062, + "grad_norm": 0.5257192844519896, + "learning_rate": 6.47708296733669e-07, + "loss": 1.2932, + "step": 2222 + }, + { + "epoch": 4.2669227076332215, + "grad_norm": 0.5034823968523833, + "learning_rate": 6.444135204765761e-07, + "loss": 1.1928, + "step": 2223 + }, + { + "epoch": 4.268843014882381, + "grad_norm": 0.4959340866925871, + "learning_rate": 6.411265682602891e-07, + "loss": 1.2779, + "step": 2224 + }, + { + "epoch": 4.270763322131541, + "grad_norm": 0.5588545068362047, + "learning_rate": 6.37847445989232e-07, + "loss": 1.1419, + "step": 2225 + }, + { + "epoch": 4.272683629380701, + "grad_norm": 0.5420430470920086, + "learning_rate": 6.345761595537698e-07, + "loss": 1.1355, + "step": 2226 + }, + { + "epoch": 4.274603936629861, + "grad_norm": 0.5455842818327883, + "learning_rate": 6.313127148301912e-07, + "loss": 1.2221, + "step": 2227 + }, + { + "epoch": 4.27652424387902, + "grad_norm": 0.5547383681327501, + "learning_rate": 6.280571176806971e-07, + "loss": 1.3036, + "step": 2228 + }, + { + "epoch": 4.278444551128181, + "grad_norm": 0.5295210278180471, + "learning_rate": 6.248093739533901e-07, + "loss": 1.26, + "step": 2229 + }, + { + "epoch": 4.28036485837734, + "grad_norm": 0.5638514452022787, + "learning_rate": 6.215694894822699e-07, + "loss": 1.1689, + "step": 2230 + }, + { + "epoch": 4.2822851656265, + "grad_norm": 0.500168136718917, + "learning_rate": 6.183374700872142e-07, + "loss": 1.2644, + "step": 2231 + }, + { + "epoch": 4.2842054728756604, + "grad_norm": 0.5546679433434593, + "learning_rate": 6.151133215739752e-07, + "loss": 1.1119, + "step": 2232 + }, + { + "epoch": 4.28612578012482, + "grad_norm": 0.5116747370503683, + "learning_rate": 6.118970497341648e-07, + "loss": 1.2413, + "step": 2233 + }, + { + "epoch": 4.28804608737398, + "grad_norm": 0.5980091089903843, + "learning_rate": 6.08688660345248e-07, + "loss": 1.1936, + "step": 2234 + }, + { + "epoch": 4.28996639462314, + "grad_norm": 0.5321703956764452, + "learning_rate": 6.05488159170527e-07, + "loss": 1.1472, + "step": 2235 + }, + { 
+ "epoch": 4.2918867018723, + "grad_norm": 0.539991324716268, + "learning_rate": 6.022955519591367e-07, + "loss": 1.1932, + "step": 2236 + }, + { + "epoch": 4.293807009121459, + "grad_norm": 0.5099620152867691, + "learning_rate": 5.991108444460336e-07, + "loss": 1.1725, + "step": 2237 + }, + { + "epoch": 4.295727316370619, + "grad_norm": 0.5279342625363437, + "learning_rate": 5.959340423519777e-07, + "loss": 1.2307, + "step": 2238 + }, + { + "epoch": 4.297647623619779, + "grad_norm": 0.586817116315889, + "learning_rate": 5.927651513835342e-07, + "loss": 1.2239, + "step": 2239 + }, + { + "epoch": 4.299567930868939, + "grad_norm": 0.5110559736126249, + "learning_rate": 5.896041772330558e-07, + "loss": 1.1683, + "step": 2240 + }, + { + "epoch": 4.3014882381180986, + "grad_norm": 0.5296609367525358, + "learning_rate": 5.864511255786725e-07, + "loss": 1.2942, + "step": 2241 + }, + { + "epoch": 4.303408545367259, + "grad_norm": 0.5590100588895419, + "learning_rate": 5.83306002084284e-07, + "loss": 1.2532, + "step": 2242 + }, + { + "epoch": 4.305328852616419, + "grad_norm": 0.5012659883169898, + "learning_rate": 5.801688123995486e-07, + "loss": 1.1434, + "step": 2243 + }, + { + "epoch": 4.307249159865578, + "grad_norm": 0.535589571581107, + "learning_rate": 5.770395621598734e-07, + "loss": 1.1685, + "step": 2244 + }, + { + "epoch": 4.309169467114739, + "grad_norm": 0.5429721700953611, + "learning_rate": 5.739182569864021e-07, + "loss": 1.2392, + "step": 2245 + }, + { + "epoch": 4.311089774363898, + "grad_norm": 0.5245782578257255, + "learning_rate": 5.708049024860085e-07, + "loss": 1.2023, + "step": 2246 + }, + { + "epoch": 4.313010081613058, + "grad_norm": 0.5295257685449466, + "learning_rate": 5.676995042512823e-07, + "loss": 1.0149, + "step": 2247 + }, + { + "epoch": 4.314930388862218, + "grad_norm": 0.6033834109010314, + "learning_rate": 5.646020678605219e-07, + "loss": 1.244, + "step": 2248 + }, + { + "epoch": 4.316850696111378, + "grad_norm": 0.5512668618208679, + "learning_rate": 5.615125988777248e-07, + "loss": 1.1784, + "step": 2249 + }, + { + "epoch": 4.3187710033605375, + "grad_norm": 0.5312709859648651, + "learning_rate": 5.584311028525774e-07, + "loss": 1.1566, + "step": 2250 + }, + { + "epoch": 4.320691310609698, + "grad_norm": 0.5106762768516646, + "learning_rate": 5.553575853204385e-07, + "loss": 1.2307, + "step": 2251 + }, + { + "epoch": 4.322611617858858, + "grad_norm": 0.5315187730718424, + "learning_rate": 5.522920518023406e-07, + "loss": 1.1602, + "step": 2252 + }, + { + "epoch": 4.324531925108017, + "grad_norm": 0.6076547942912415, + "learning_rate": 5.492345078049737e-07, + "loss": 1.2127, + "step": 2253 + }, + { + "epoch": 4.326452232357177, + "grad_norm": 0.5391080081878457, + "learning_rate": 5.461849588206725e-07, + "loss": 1.0823, + "step": 2254 + }, + { + "epoch": 4.328372539606337, + "grad_norm": 0.5919064362187543, + "learning_rate": 5.431434103274153e-07, + "loss": 1.2054, + "step": 2255 + }, + { + "epoch": 4.330292846855497, + "grad_norm": 0.5649228057762852, + "learning_rate": 5.401098677888029e-07, + "loss": 1.0109, + "step": 2256 + }, + { + "epoch": 4.3322131541046565, + "grad_norm": 0.591323902607826, + "learning_rate": 5.370843366540607e-07, + "loss": 1.1044, + "step": 2257 + }, + { + "epoch": 4.334133461353817, + "grad_norm": 0.5125718280453092, + "learning_rate": 5.340668223580181e-07, + "loss": 1.2011, + "step": 2258 + }, + { + "epoch": 4.3360537686029765, + "grad_norm": 0.5986752103608021, + "learning_rate": 5.310573303211081e-07, + "loss": 1.0793, + 
"step": 2259 + }, + { + "epoch": 4.337974075852136, + "grad_norm": 0.5613343883486763, + "learning_rate": 5.280558659493495e-07, + "loss": 1.1946, + "step": 2260 + }, + { + "epoch": 4.339894383101297, + "grad_norm": 0.5335537502778783, + "learning_rate": 5.250624346343413e-07, + "loss": 1.3099, + "step": 2261 + }, + { + "epoch": 4.341814690350456, + "grad_norm": 0.554257239420084, + "learning_rate": 5.220770417532551e-07, + "loss": 1.2912, + "step": 2262 + }, + { + "epoch": 4.343734997599616, + "grad_norm": 0.5321715333846765, + "learning_rate": 5.190996926688213e-07, + "loss": 1.291, + "step": 2263 + }, + { + "epoch": 4.345655304848776, + "grad_norm": 0.5784447543122451, + "learning_rate": 5.161303927293204e-07, + "loss": 1.1728, + "step": 2264 + }, + { + "epoch": 4.347575612097936, + "grad_norm": 0.4900416528497609, + "learning_rate": 5.131691472685735e-07, + "loss": 1.1323, + "step": 2265 + }, + { + "epoch": 4.3494959193470955, + "grad_norm": 0.5616008066521012, + "learning_rate": 5.102159616059365e-07, + "loss": 1.3118, + "step": 2266 + }, + { + "epoch": 4.351416226596255, + "grad_norm": 0.5898030702800516, + "learning_rate": 5.072708410462835e-07, + "loss": 1.2631, + "step": 2267 + }, + { + "epoch": 4.3533365338454155, + "grad_norm": 0.5051100906816395, + "learning_rate": 5.043337908800039e-07, + "loss": 1.1376, + "step": 2268 + }, + { + "epoch": 4.355256841094575, + "grad_norm": 0.5855841685995898, + "learning_rate": 5.014048163829871e-07, + "loss": 1.2046, + "step": 2269 + }, + { + "epoch": 4.357177148343735, + "grad_norm": 0.5832699170265024, + "learning_rate": 4.984839228166205e-07, + "loss": 1.2306, + "step": 2270 + }, + { + "epoch": 4.359097455592895, + "grad_norm": 0.49117125441130505, + "learning_rate": 4.955711154277699e-07, + "loss": 0.9553, + "step": 2271 + }, + { + "epoch": 4.361017762842055, + "grad_norm": 0.584931045667796, + "learning_rate": 4.926663994487813e-07, + "loss": 1.1349, + "step": 2272 + }, + { + "epoch": 4.362938070091214, + "grad_norm": 0.543921563572442, + "learning_rate": 4.897697800974622e-07, + "loss": 1.2854, + "step": 2273 + }, + { + "epoch": 4.364858377340375, + "grad_norm": 0.5379896955513568, + "learning_rate": 4.868812625770752e-07, + "loss": 1.1637, + "step": 2274 + }, + { + "epoch": 4.366778684589534, + "grad_norm": 0.538103766395456, + "learning_rate": 4.840008520763334e-07, + "loss": 1.1338, + "step": 2275 + }, + { + "epoch": 4.368698991838694, + "grad_norm": 0.5331291173207522, + "learning_rate": 4.81128553769385e-07, + "loss": 1.2744, + "step": 2276 + }, + { + "epoch": 4.3706192990878545, + "grad_norm": 0.522873046257309, + "learning_rate": 4.782643728158048e-07, + "loss": 1.2527, + "step": 2277 + }, + { + "epoch": 4.372539606337014, + "grad_norm": 0.5418177340799811, + "learning_rate": 4.7540831436058697e-07, + "loss": 1.1551, + "step": 2278 + }, + { + "epoch": 4.374459913586174, + "grad_norm": 0.571725849511299, + "learning_rate": 4.725603835341369e-07, + "loss": 1.3143, + "step": 2279 + }, + { + "epoch": 4.376380220835333, + "grad_norm": 0.5759865734281572, + "learning_rate": 4.6972058545225684e-07, + "loss": 1.2817, + "step": 2280 + }, + { + "epoch": 4.378300528084494, + "grad_norm": 0.5541013331746352, + "learning_rate": 4.6688892521614405e-07, + "loss": 1.1466, + "step": 2281 + }, + { + "epoch": 4.380220835333653, + "grad_norm": 0.5065827054224216, + "learning_rate": 4.64065407912373e-07, + "loss": 1.1019, + "step": 2282 + }, + { + "epoch": 4.382141142582813, + "grad_norm": 0.5683999141934853, + "learning_rate": 4.612500386128954e-07, 
+ "loss": 1.1226, + "step": 2283 + }, + { + "epoch": 4.384061449831973, + "grad_norm": 0.5348030204636712, + "learning_rate": 4.58442822375022e-07, + "loss": 1.22, + "step": 2284 + }, + { + "epoch": 4.385981757081133, + "grad_norm": 0.516628110978289, + "learning_rate": 4.55643764241423e-07, + "loss": 1.154, + "step": 2285 + }, + { + "epoch": 4.387902064330293, + "grad_norm": 0.610882014721286, + "learning_rate": 4.528528692401091e-07, + "loss": 1.1523, + "step": 2286 + }, + { + "epoch": 4.389822371579453, + "grad_norm": 0.48791311911468993, + "learning_rate": 4.500701423844295e-07, + "loss": 1.2812, + "step": 2287 + }, + { + "epoch": 4.391742678828613, + "grad_norm": 0.486781680742149, + "learning_rate": 4.472955886730618e-07, + "loss": 1.2364, + "step": 2288 + }, + { + "epoch": 4.393662986077772, + "grad_norm": 0.5378083089476214, + "learning_rate": 4.44529213090002e-07, + "loss": 1.1949, + "step": 2289 + }, + { + "epoch": 4.395583293326933, + "grad_norm": 0.5280102302000105, + "learning_rate": 4.4177102060455337e-07, + "loss": 1.1843, + "step": 2290 + }, + { + "epoch": 4.397503600576092, + "grad_norm": 0.5473627860887377, + "learning_rate": 4.390210161713199e-07, + "loss": 1.2583, + "step": 2291 + }, + { + "epoch": 4.399423907825252, + "grad_norm": 0.5359937445898296, + "learning_rate": 4.362792047302006e-07, + "loss": 1.2244, + "step": 2292 + }, + { + "epoch": 4.4013442150744115, + "grad_norm": 0.5073768351792873, + "learning_rate": 4.335455912063724e-07, + "loss": 1.0374, + "step": 2293 + }, + { + "epoch": 4.403264522323572, + "grad_norm": 0.5403483051375655, + "learning_rate": 4.308201805102907e-07, + "loss": 1.1036, + "step": 2294 + }, + { + "epoch": 4.405184829572732, + "grad_norm": 0.5335334211307541, + "learning_rate": 4.281029775376716e-07, + "loss": 1.1679, + "step": 2295 + }, + { + "epoch": 4.407105136821891, + "grad_norm": 0.56859088630387, + "learning_rate": 4.2539398716949233e-07, + "loss": 1.2197, + "step": 2296 + }, + { + "epoch": 4.409025444071052, + "grad_norm": 0.5059125326310283, + "learning_rate": 4.2269321427197195e-07, + "loss": 1.2074, + "step": 2297 + }, + { + "epoch": 4.410945751320211, + "grad_norm": 0.5610840667558982, + "learning_rate": 4.200006636965742e-07, + "loss": 1.1575, + "step": 2298 + }, + { + "epoch": 4.412866058569371, + "grad_norm": 0.5423403388966873, + "learning_rate": 4.17316340279989e-07, + "loss": 1.2436, + "step": 2299 + }, + { + "epoch": 4.414786365818531, + "grad_norm": 0.5281583847882931, + "learning_rate": 4.14640248844127e-07, + "loss": 1.0624, + "step": 2300 + }, + { + "epoch": 4.416706673067691, + "grad_norm": 0.5153549779845854, + "learning_rate": 4.1197239419611513e-07, + "loss": 1.3118, + "step": 2301 + }, + { + "epoch": 4.4186269803168505, + "grad_norm": 0.5608457296161605, + "learning_rate": 4.093127811282821e-07, + "loss": 1.0932, + "step": 2302 + }, + { + "epoch": 4.420547287566011, + "grad_norm": 0.519317347603663, + "learning_rate": 4.0666141441815244e-07, + "loss": 1.16, + "step": 2303 + }, + { + "epoch": 4.422467594815171, + "grad_norm": 0.5866505732073137, + "learning_rate": 4.0401829882843635e-07, + "loss": 1.1031, + "step": 2304 + }, + { + "epoch": 4.42438790206433, + "grad_norm": 0.5638688976795906, + "learning_rate": 4.0138343910702536e-07, + "loss": 1.1754, + "step": 2305 + }, + { + "epoch": 4.42630820931349, + "grad_norm": 0.5598698432634595, + "learning_rate": 3.987568399869773e-07, + "loss": 1.2457, + "step": 2306 + }, + { + "epoch": 4.42822851656265, + "grad_norm": 0.5142550388792676, + "learning_rate": 
3.9613850618651473e-07, + "loss": 1.0646, + "step": 2307 + }, + { + "epoch": 4.43014882381181, + "grad_norm": 0.5438870372503025, + "learning_rate": 3.935284424090091e-07, + "loss": 1.1632, + "step": 2308 + }, + { + "epoch": 4.4320691310609694, + "grad_norm": 0.5732033484626488, + "learning_rate": 3.9092665334298064e-07, + "loss": 1.3186, + "step": 2309 + }, + { + "epoch": 4.43398943831013, + "grad_norm": 0.6495038131955773, + "learning_rate": 3.8833314366208077e-07, + "loss": 1.3318, + "step": 2310 + }, + { + "epoch": 4.4359097455592895, + "grad_norm": 0.4835804434033599, + "learning_rate": 3.857479180250939e-07, + "loss": 1.1303, + "step": 2311 + }, + { + "epoch": 4.437830052808449, + "grad_norm": 0.5315223891292389, + "learning_rate": 3.831709810759188e-07, + "loss": 1.2144, + "step": 2312 + }, + { + "epoch": 4.43975036005761, + "grad_norm": 0.4737667415036436, + "learning_rate": 3.8060233744356634e-07, + "loss": 1.1118, + "step": 2313 + }, + { + "epoch": 4.441670667306769, + "grad_norm": 0.5101914681613662, + "learning_rate": 3.7804199174215183e-07, + "loss": 1.0808, + "step": 2314 + }, + { + "epoch": 4.443590974555929, + "grad_norm": 0.5393638735819681, + "learning_rate": 3.754899485708835e-07, + "loss": 1.2713, + "step": 2315 + }, + { + "epoch": 4.445511281805089, + "grad_norm": 0.521195042645774, + "learning_rate": 3.729462125140559e-07, + "loss": 1.0855, + "step": 2316 + }, + { + "epoch": 4.447431589054249, + "grad_norm": 0.5244259639397681, + "learning_rate": 3.70410788141039e-07, + "loss": 1.1507, + "step": 2317 + }, + { + "epoch": 4.449351896303408, + "grad_norm": 0.5107452272250373, + "learning_rate": 3.678836800062763e-07, + "loss": 1.2129, + "step": 2318 + }, + { + "epoch": 4.451272203552568, + "grad_norm": 0.5064965093094034, + "learning_rate": 3.6536489264926975e-07, + "loss": 1.0384, + "step": 2319 + }, + { + "epoch": 4.4531925108017285, + "grad_norm": 0.5732252837314019, + "learning_rate": 3.628544305945758e-07, + "loss": 1.2285, + "step": 2320 + }, + { + "epoch": 4.455112818050888, + "grad_norm": 0.5478092837260334, + "learning_rate": 3.6035229835179485e-07, + "loss": 1.2494, + "step": 2321 + }, + { + "epoch": 4.457033125300048, + "grad_norm": 0.47727767061092236, + "learning_rate": 3.57858500415566e-07, + "loss": 1.2083, + "step": 2322 + }, + { + "epoch": 4.458953432549208, + "grad_norm": 0.5674085054100999, + "learning_rate": 3.5537304126555484e-07, + "loss": 1.2187, + "step": 2323 + }, + { + "epoch": 4.460873739798368, + "grad_norm": 0.5900182324820775, + "learning_rate": 3.5289592536645047e-07, + "loss": 1.1513, + "step": 2324 + }, + { + "epoch": 4.462794047047527, + "grad_norm": 0.5355653617809221, + "learning_rate": 3.504271571679535e-07, + "loss": 1.2486, + "step": 2325 + }, + { + "epoch": 4.464714354296688, + "grad_norm": 0.5170647545038898, + "learning_rate": 3.479667411047677e-07, + "loss": 1.1883, + "step": 2326 + }, + { + "epoch": 4.466634661545847, + "grad_norm": 0.5669460219536886, + "learning_rate": 3.455146815965965e-07, + "loss": 1.0914, + "step": 2327 + }, + { + "epoch": 4.468554968795007, + "grad_norm": 0.5189196905113346, + "learning_rate": 3.4307098304813215e-07, + "loss": 1.1181, + "step": 2328 + }, + { + "epoch": 4.4704752760441675, + "grad_norm": 0.534830961282956, + "learning_rate": 3.406356498490465e-07, + "loss": 1.3203, + "step": 2329 + }, + { + "epoch": 4.472395583293327, + "grad_norm": 0.49082050029426566, + "learning_rate": 3.3820868637398305e-07, + "loss": 1.0177, + "step": 2330 + }, + { + "epoch": 4.474315890542487, + "grad_norm": 
0.5053769422456148, + "learning_rate": 3.357900969825545e-07, + "loss": 1.2992, + "step": 2331 + }, + { + "epoch": 4.476236197791646, + "grad_norm": 0.5204340553319146, + "learning_rate": 3.333798860193277e-07, + "loss": 1.2408, + "step": 2332 + }, + { + "epoch": 4.478156505040807, + "grad_norm": 0.5562531978933155, + "learning_rate": 3.3097805781382164e-07, + "loss": 1.2486, + "step": 2333 + }, + { + "epoch": 4.480076812289966, + "grad_norm": 0.5125220027722861, + "learning_rate": 3.285846166804946e-07, + "loss": 1.4295, + "step": 2334 + }, + { + "epoch": 4.481997119539126, + "grad_norm": 0.5324610888319075, + "learning_rate": 3.2619956691874176e-07, + "loss": 1.1081, + "step": 2335 + }, + { + "epoch": 4.483917426788286, + "grad_norm": 0.5533555788743865, + "learning_rate": 3.2382291281288113e-07, + "loss": 1.1459, + "step": 2336 + }, + { + "epoch": 4.485837734037446, + "grad_norm": 0.4929628131716217, + "learning_rate": 3.2145465863215373e-07, + "loss": 1.1436, + "step": 2337 + }, + { + "epoch": 4.487758041286606, + "grad_norm": 0.6392747364741663, + "learning_rate": 3.1909480863070884e-07, + "loss": 1.2974, + "step": 2338 + }, + { + "epoch": 4.489678348535766, + "grad_norm": 0.5361263047682794, + "learning_rate": 3.167433670475978e-07, + "loss": 1.099, + "step": 2339 + }, + { + "epoch": 4.491598655784926, + "grad_norm": 0.571027917881106, + "learning_rate": 3.1440033810677117e-07, + "loss": 1.1118, + "step": 2340 + }, + { + "epoch": 4.493518963034085, + "grad_norm": 0.5466097628281635, + "learning_rate": 3.1206572601706673e-07, + "loss": 1.0978, + "step": 2341 + }, + { + "epoch": 4.495439270283246, + "grad_norm": 0.5130430772065713, + "learning_rate": 3.09739534972201e-07, + "loss": 1.2212, + "step": 2342 + }, + { + "epoch": 4.497359577532405, + "grad_norm": 0.5262663103386174, + "learning_rate": 3.074217691507642e-07, + "loss": 1.1402, + "step": 2343 + }, + { + "epoch": 4.499279884781565, + "grad_norm": 0.5138499822045522, + "learning_rate": 3.0511243271621474e-07, + "loss": 1.2374, + "step": 2344 + }, + { + "epoch": 4.5012001920307245, + "grad_norm": 0.49385785802662907, + "learning_rate": 3.028115298168649e-07, + "loss": 1.2302, + "step": 2345 + }, + { + "epoch": 4.503120499279885, + "grad_norm": 0.5484971838976654, + "learning_rate": 3.005190645858819e-07, + "loss": 1.2726, + "step": 2346 + }, + { + "epoch": 4.505040806529045, + "grad_norm": 0.5517338916826514, + "learning_rate": 2.982350411412721e-07, + "loss": 1.2384, + "step": 2347 + }, + { + "epoch": 4.506961113778204, + "grad_norm": 0.5734057674034707, + "learning_rate": 2.9595946358588144e-07, + "loss": 1.2009, + "step": 2348 + }, + { + "epoch": 4.508881421027365, + "grad_norm": 0.581387194554494, + "learning_rate": 2.9369233600738066e-07, + "loss": 1.1697, + "step": 2349 + }, + { + "epoch": 4.510801728276524, + "grad_norm": 0.5563024565198406, + "learning_rate": 2.91433662478266e-07, + "loss": 1.2039, + "step": 2350 + }, + { + "epoch": 4.512722035525684, + "grad_norm": 0.4823771249523727, + "learning_rate": 2.89183447055843e-07, + "loss": 1.1713, + "step": 2351 + }, + { + "epoch": 4.514642342774844, + "grad_norm": 0.6260998229974586, + "learning_rate": 2.8694169378222614e-07, + "loss": 1.2584, + "step": 2352 + }, + { + "epoch": 4.516562650024004, + "grad_norm": 0.4957684962314896, + "learning_rate": 2.8470840668432867e-07, + "loss": 1.1442, + "step": 2353 + }, + { + "epoch": 4.5184829572731635, + "grad_norm": 0.5443740158719033, + "learning_rate": 2.8248358977385647e-07, + "loss": 1.0438, + "step": 2354 + }, + { + "epoch": 
4.520403264522324, + "grad_norm": 0.6104240560494821, + "learning_rate": 2.8026724704729946e-07, + "loss": 1.1724, + "step": 2355 + }, + { + "epoch": 4.522323571771484, + "grad_norm": 0.531034132363631, + "learning_rate": 2.7805938248592456e-07, + "loss": 1.1916, + "step": 2356 + }, + { + "epoch": 4.524243879020643, + "grad_norm": 0.4955322758372588, + "learning_rate": 2.758600000557715e-07, + "loss": 1.0864, + "step": 2357 + }, + { + "epoch": 4.526164186269803, + "grad_norm": 0.5199673563293243, + "learning_rate": 2.7366910370764e-07, + "loss": 1.2083, + "step": 2358 + }, + { + "epoch": 4.528084493518963, + "grad_norm": 0.4963034776189273, + "learning_rate": 2.714866973770897e-07, + "loss": 1.168, + "step": 2359 + }, + { + "epoch": 4.530004800768123, + "grad_norm": 0.5639440603074395, + "learning_rate": 2.6931278498442625e-07, + "loss": 1.0823, + "step": 2360 + }, + { + "epoch": 4.531925108017282, + "grad_norm": 0.52039393678363, + "learning_rate": 2.671473704346994e-07, + "loss": 1.1779, + "step": 2361 + }, + { + "epoch": 4.533845415266443, + "grad_norm": 0.5927004122788705, + "learning_rate": 2.649904576176932e-07, + "loss": 1.293, + "step": 2362 + }, + { + "epoch": 4.5357657225156025, + "grad_norm": 0.559928632716166, + "learning_rate": 2.6284205040792044e-07, + "loss": 1.2244, + "step": 2363 + }, + { + "epoch": 4.537686029764762, + "grad_norm": 0.5762922355096078, + "learning_rate": 2.6070215266461474e-07, + "loss": 1.2347, + "step": 2364 + }, + { + "epoch": 4.539606337013923, + "grad_norm": 0.5589569148614102, + "learning_rate": 2.585707682317229e-07, + "loss": 1.117, + "step": 2365 + }, + { + "epoch": 4.541526644263082, + "grad_norm": 0.5596328135508124, + "learning_rate": 2.5644790093790063e-07, + "loss": 1.1904, + "step": 2366 + }, + { + "epoch": 4.543446951512242, + "grad_norm": 0.592534507977606, + "learning_rate": 2.543335545965048e-07, + "loss": 1.2519, + "step": 2367 + }, + { + "epoch": 4.545367258761402, + "grad_norm": 0.4843630420441866, + "learning_rate": 2.5222773300558333e-07, + "loss": 1.1365, + "step": 2368 + }, + { + "epoch": 4.547287566010562, + "grad_norm": 0.6157309509381578, + "learning_rate": 2.501304399478721e-07, + "loss": 1.1968, + "step": 2369 + }, + { + "epoch": 4.549207873259721, + "grad_norm": 0.5278563202278436, + "learning_rate": 2.480416791907886e-07, + "loss": 1.2249, + "step": 2370 + }, + { + "epoch": 4.551128180508881, + "grad_norm": 0.5478439177696036, + "learning_rate": 2.4596145448642084e-07, + "loss": 1.3324, + "step": 2371 + }, + { + "epoch": 4.5530484877580415, + "grad_norm": 0.5360510862024155, + "learning_rate": 2.438897695715253e-07, + "loss": 1.1161, + "step": 2372 + }, + { + "epoch": 4.554968795007201, + "grad_norm": 0.5748950209283927, + "learning_rate": 2.418266281675163e-07, + "loss": 1.1416, + "step": 2373 + }, + { + "epoch": 4.556889102256361, + "grad_norm": 0.48769952009510925, + "learning_rate": 2.397720339804649e-07, + "loss": 1.173, + "step": 2374 + }, + { + "epoch": 4.558809409505521, + "grad_norm": 0.5396306357073073, + "learning_rate": 2.3772599070108315e-07, + "loss": 1.1701, + "step": 2375 + }, + { + "epoch": 4.560729716754681, + "grad_norm": 0.49715911678752284, + "learning_rate": 2.3568850200472838e-07, + "loss": 1.1129, + "step": 2376 + }, + { + "epoch": 4.56265002400384, + "grad_norm": 0.5253742790826745, + "learning_rate": 2.3365957155138896e-07, + "loss": 0.9932, + "step": 2377 + }, + { + "epoch": 4.564570331253001, + "grad_norm": 0.5111527548931867, + "learning_rate": 2.3163920298567677e-07, + "loss": 1.2449, + "step": 
2378 + }, + { + "epoch": 4.56649063850216, + "grad_norm": 0.5904957548330131, + "learning_rate": 2.296273999368287e-07, + "loss": 1.184, + "step": 2379 + }, + { + "epoch": 4.56841094575132, + "grad_norm": 0.5689166115144071, + "learning_rate": 2.276241660186934e-07, + "loss": 1.1815, + "step": 2380 + }, + { + "epoch": 4.5703312530004805, + "grad_norm": 0.5256990224275265, + "learning_rate": 2.2562950482972578e-07, + "loss": 1.1635, + "step": 2381 + }, + { + "epoch": 4.57225156024964, + "grad_norm": 0.5764365609373793, + "learning_rate": 2.2364341995298133e-07, + "loss": 1.1195, + "step": 2382 + }, + { + "epoch": 4.5741718674988, + "grad_norm": 0.5163893118889138, + "learning_rate": 2.2166591495611123e-07, + "loss": 1.1899, + "step": 2383 + }, + { + "epoch": 4.576092174747959, + "grad_norm": 0.546995816442514, + "learning_rate": 2.1969699339135232e-07, + "loss": 1.3698, + "step": 2384 + }, + { + "epoch": 4.57801248199712, + "grad_norm": 0.5286930670581923, + "learning_rate": 2.1773665879552486e-07, + "loss": 1.1956, + "step": 2385 + }, + { + "epoch": 4.579932789246279, + "grad_norm": 0.5216234493822958, + "learning_rate": 2.1578491469002372e-07, + "loss": 0.956, + "step": 2386 + }, + { + "epoch": 4.581853096495439, + "grad_norm": 0.501265132838512, + "learning_rate": 2.1384176458081108e-07, + "loss": 1.1572, + "step": 2387 + }, + { + "epoch": 4.583773403744599, + "grad_norm": 0.5273012166717503, + "learning_rate": 2.1190721195841258e-07, + "loss": 1.2273, + "step": 2388 + }, + { + "epoch": 4.585693710993759, + "grad_norm": 0.4926204897513476, + "learning_rate": 2.0998126029790956e-07, + "loss": 1.2116, + "step": 2389 + }, + { + "epoch": 4.587614018242919, + "grad_norm": 0.569936872807269, + "learning_rate": 2.0806391305893568e-07, + "loss": 1.0993, + "step": 2390 + }, + { + "epoch": 4.589534325492079, + "grad_norm": 0.5806141488945643, + "learning_rate": 2.0615517368566317e-07, + "loss": 1.2495, + "step": 2391 + }, + { + "epoch": 4.591454632741239, + "grad_norm": 0.5873702461527202, + "learning_rate": 2.0425504560680654e-07, + "loss": 1.2546, + "step": 2392 + }, + { + "epoch": 4.593374939990398, + "grad_norm": 0.5765515477488461, + "learning_rate": 2.0236353223560933e-07, + "loss": 1.1874, + "step": 2393 + }, + { + "epoch": 4.595295247239559, + "grad_norm": 0.5085670217015211, + "learning_rate": 2.0048063696984088e-07, + "loss": 1.1626, + "step": 2394 + }, + { + "epoch": 4.597215554488718, + "grad_norm": 0.5069744445681758, + "learning_rate": 1.986063631917895e-07, + "loss": 1.2692, + "step": 2395 + }, + { + "epoch": 4.599135861737878, + "grad_norm": 0.5372860670046514, + "learning_rate": 1.9674071426825647e-07, + "loss": 1.181, + "step": 2396 + }, + { + "epoch": 4.6010561689870375, + "grad_norm": 0.4909337619779229, + "learning_rate": 1.9488369355055105e-07, + "loss": 1.0328, + "step": 2397 + }, + { + "epoch": 4.602976476236198, + "grad_norm": 0.5692149991275612, + "learning_rate": 1.9303530437448036e-07, + "loss": 1.2066, + "step": 2398 + }, + { + "epoch": 4.604896783485358, + "grad_norm": 0.592276160863725, + "learning_rate": 1.9119555006035128e-07, + "loss": 1.1506, + "step": 2399 + }, + { + "epoch": 4.606817090734517, + "grad_norm": 0.4753015582809296, + "learning_rate": 1.8936443391295578e-07, + "loss": 1.164, + "step": 2400 + }, + { + "epoch": 4.608737397983678, + "grad_norm": 0.5372487769491768, + "learning_rate": 1.8754195922156938e-07, + "loss": 1.2473, + "step": 2401 + }, + { + "epoch": 4.610657705232837, + "grad_norm": 0.5417398544490166, + "learning_rate": 
1.857281292599461e-07, + "loss": 1.2876, + "step": 2402 + }, + { + "epoch": 4.612578012481997, + "grad_norm": 0.5115809698669399, + "learning_rate": 1.8392294728631243e-07, + "loss": 1.1042, + "step": 2403 + }, + { + "epoch": 4.614498319731157, + "grad_norm": 0.5677345427635627, + "learning_rate": 1.8212641654335618e-07, + "loss": 1.2678, + "step": 2404 + }, + { + "epoch": 4.616418626980317, + "grad_norm": 0.5113202813248051, + "learning_rate": 1.803385402582275e-07, + "loss": 1.173, + "step": 2405 + }, + { + "epoch": 4.6183389342294765, + "grad_norm": 0.47588765692726887, + "learning_rate": 1.7855932164253133e-07, + "loss": 1.0561, + "step": 2406 + }, + { + "epoch": 4.620259241478637, + "grad_norm": 0.565487050299193, + "learning_rate": 1.767887638923177e-07, + "loss": 1.1085, + "step": 2407 + }, + { + "epoch": 4.622179548727797, + "grad_norm": 0.5324791055465395, + "learning_rate": 1.750268701880814e-07, + "loss": 1.1686, + "step": 2408 + }, + { + "epoch": 4.624099855976956, + "grad_norm": 0.555097456214668, + "learning_rate": 1.732736436947524e-07, + "loss": 1.2407, + "step": 2409 + }, + { + "epoch": 4.626020163226116, + "grad_norm": 0.5345340734100243, + "learning_rate": 1.715290875616926e-07, + "loss": 1.2966, + "step": 2410 + }, + { + "epoch": 4.627940470475276, + "grad_norm": 0.49271079546842256, + "learning_rate": 1.6979320492268801e-07, + "loss": 0.9441, + "step": 2411 + }, + { + "epoch": 4.629860777724436, + "grad_norm": 0.49468134779050915, + "learning_rate": 1.6806599889594488e-07, + "loss": 1.0843, + "step": 2412 + }, + { + "epoch": 4.631781084973595, + "grad_norm": 0.5265498437623033, + "learning_rate": 1.6634747258408413e-07, + "loss": 1.1729, + "step": 2413 + }, + { + "epoch": 4.633701392222756, + "grad_norm": 0.5311764846777762, + "learning_rate": 1.646376290741325e-07, + "loss": 1.246, + "step": 2414 + }, + { + "epoch": 4.6356216994719155, + "grad_norm": 0.4796930821312133, + "learning_rate": 1.629364714375231e-07, + "loss": 1.1128, + "step": 2415 + }, + { + "epoch": 4.637542006721075, + "grad_norm": 0.5554852231101871, + "learning_rate": 1.6124400273008434e-07, + "loss": 1.3782, + "step": 2416 + }, + { + "epoch": 4.639462313970236, + "grad_norm": 0.4878015649531805, + "learning_rate": 1.5956022599203758e-07, + "loss": 1.2086, + "step": 2417 + }, + { + "epoch": 4.641382621219395, + "grad_norm": 0.6014279965138242, + "learning_rate": 1.5788514424798785e-07, + "loss": 1.2966, + "step": 2418 + }, + { + "epoch": 4.643302928468555, + "grad_norm": 0.5988363390480709, + "learning_rate": 1.5621876050692596e-07, + "loss": 1.1583, + "step": 2419 + }, + { + "epoch": 4.645223235717715, + "grad_norm": 0.5223654428506601, + "learning_rate": 1.5456107776221363e-07, + "loss": 1.2766, + "step": 2420 + }, + { + "epoch": 4.647143542966875, + "grad_norm": 0.536969766838571, + "learning_rate": 1.5291209899158555e-07, + "loss": 1.254, + "step": 2421 + }, + { + "epoch": 4.649063850216034, + "grad_norm": 0.5344688477901766, + "learning_rate": 1.5127182715714006e-07, + "loss": 1.1272, + "step": 2422 + }, + { + "epoch": 4.650984157465194, + "grad_norm": 0.49005104853458625, + "learning_rate": 1.4964026520533637e-07, + "loss": 1.0687, + "step": 2423 + }, + { + "epoch": 4.6529044647143545, + "grad_norm": 0.5356775002561505, + "learning_rate": 1.480174160669856e-07, + "loss": 1.0808, + "step": 2424 + }, + { + "epoch": 4.654824771963514, + "grad_norm": 0.5236216227934732, + "learning_rate": 1.4640328265725035e-07, + "loss": 1.224, + "step": 2425 + }, + { + "epoch": 4.656745079212674, + "grad_norm": 
0.496558816706279, + "learning_rate": 1.4479786787563565e-07, + "loss": 1.1332, + "step": 2426 + }, + { + "epoch": 4.658665386461834, + "grad_norm": 0.6008632658563834, + "learning_rate": 1.4320117460598416e-07, + "loss": 1.2786, + "step": 2427 + }, + { + "epoch": 4.660585693710994, + "grad_norm": 0.5797179887147064, + "learning_rate": 1.4161320571647374e-07, + "loss": 1.15, + "step": 2428 + }, + { + "epoch": 4.662506000960153, + "grad_norm": 0.5613480131914534, + "learning_rate": 1.400339640596099e-07, + "loss": 1.2969, + "step": 2429 + }, + { + "epoch": 4.664426308209314, + "grad_norm": 0.5274637808731266, + "learning_rate": 1.3846345247222115e-07, + "loss": 1.0858, + "step": 2430 + }, + { + "epoch": 4.666346615458473, + "grad_norm": 0.4881603153147057, + "learning_rate": 1.3690167377545305e-07, + "loss": 1.3131, + "step": 2431 + }, + { + "epoch": 4.668266922707633, + "grad_norm": 0.5361714293924008, + "learning_rate": 1.3534863077476535e-07, + "loss": 1.2199, + "step": 2432 + }, + { + "epoch": 4.6701872299567935, + "grad_norm": 0.5247214367050599, + "learning_rate": 1.3380432625992423e-07, + "loss": 1.1926, + "step": 2433 + }, + { + "epoch": 4.672107537205953, + "grad_norm": 0.594169268613407, + "learning_rate": 1.3226876300500125e-07, + "loss": 1.2276, + "step": 2434 + }, + { + "epoch": 4.674027844455113, + "grad_norm": 0.547898525419776, + "learning_rate": 1.307419437683627e-07, + "loss": 1.1267, + "step": 2435 + }, + { + "epoch": 4.675948151704272, + "grad_norm": 0.5562615746824225, + "learning_rate": 1.2922387129267077e-07, + "loss": 1.2156, + "step": 2436 + }, + { + "epoch": 4.677868458953433, + "grad_norm": 0.5104060746771125, + "learning_rate": 1.2771454830487252e-07, + "loss": 1.1217, + "step": 2437 + }, + { + "epoch": 4.679788766202592, + "grad_norm": 0.5582914446196413, + "learning_rate": 1.2621397751620135e-07, + "loss": 1.2884, + "step": 2438 + }, + { + "epoch": 4.681709073451752, + "grad_norm": 0.504435048072775, + "learning_rate": 1.247221616221661e-07, + "loss": 1.1345, + "step": 2439 + }, + { + "epoch": 4.683629380700912, + "grad_norm": 0.5225917068411888, + "learning_rate": 1.232391033025504e-07, + "loss": 1.2439, + "step": 2440 + }, + { + "epoch": 4.685549687950072, + "grad_norm": 0.5615490121711235, + "learning_rate": 1.2176480522140654e-07, + "loss": 1.3905, + "step": 2441 + }, + { + "epoch": 4.687469995199232, + "grad_norm": 0.48679131641640216, + "learning_rate": 1.2029927002705112e-07, + "loss": 0.9632, + "step": 2442 + }, + { + "epoch": 4.689390302448392, + "grad_norm": 0.5514013840708639, + "learning_rate": 1.1884250035205713e-07, + "loss": 1.3347, + "step": 2443 + }, + { + "epoch": 4.691310609697552, + "grad_norm": 0.5548782493629567, + "learning_rate": 1.1739449881325471e-07, + "loss": 1.3207, + "step": 2444 + }, + { + "epoch": 4.693230916946711, + "grad_norm": 0.5503889286072612, + "learning_rate": 1.1595526801172263e-07, + "loss": 1.2629, + "step": 2445 + }, + { + "epoch": 4.695151224195872, + "grad_norm": 0.503350518922701, + "learning_rate": 1.1452481053278398e-07, + "loss": 1.2175, + "step": 2446 + }, + { + "epoch": 4.697071531445031, + "grad_norm": 0.5286586993771236, + "learning_rate": 1.1310312894600329e-07, + "loss": 1.2474, + "step": 2447 + }, + { + "epoch": 4.698991838694191, + "grad_norm": 0.4608532527709311, + "learning_rate": 1.1169022580517941e-07, + "loss": 1.1269, + "step": 2448 + }, + { + "epoch": 4.7009121459433505, + "grad_norm": 0.4697379386075933, + "learning_rate": 1.1028610364834324e-07, + "loss": 1.184, + "step": 2449 + }, + { + 
"epoch": 4.702832453192511, + "grad_norm": 0.5656952810580234, + "learning_rate": 1.088907649977522e-07, + "loss": 1.2234, + "step": 2450 + }, + { + "epoch": 4.704752760441671, + "grad_norm": 0.5303590502770811, + "learning_rate": 1.0750421235988517e-07, + "loss": 1.2229, + "step": 2451 + }, + { + "epoch": 4.70667306769083, + "grad_norm": 0.5020460120618436, + "learning_rate": 1.0612644822543871e-07, + "loss": 1.0994, + "step": 2452 + }, + { + "epoch": 4.708593374939991, + "grad_norm": 0.5364810278011805, + "learning_rate": 1.0475747506932199e-07, + "loss": 1.1166, + "step": 2453 + }, + { + "epoch": 4.71051368218915, + "grad_norm": 0.5004311202990305, + "learning_rate": 1.0339729535065346e-07, + "loss": 1.1114, + "step": 2454 + }, + { + "epoch": 4.71243398943831, + "grad_norm": 0.5583876521600946, + "learning_rate": 1.0204591151275589e-07, + "loss": 1.1724, + "step": 2455 + }, + { + "epoch": 4.71435429668747, + "grad_norm": 0.6030207082518468, + "learning_rate": 1.0070332598315135e-07, + "loss": 1.2797, + "step": 2456 + }, + { + "epoch": 4.71627460393663, + "grad_norm": 0.6156676029854226, + "learning_rate": 9.93695411735568e-08, + "loss": 1.3672, + "step": 2457 + }, + { + "epoch": 4.7181949111857895, + "grad_norm": 0.5183500903817541, + "learning_rate": 9.804455947988067e-08, + "loss": 1.1191, + "step": 2458 + }, + { + "epoch": 4.72011521843495, + "grad_norm": 0.5407524404591173, + "learning_rate": 9.672838328221856e-08, + "loss": 1.2677, + "step": 2459 + }, + { + "epoch": 4.72203552568411, + "grad_norm": 0.5399298075190136, + "learning_rate": 9.542101494484867e-08, + "loss": 1.1303, + "step": 2460 + }, + { + "epoch": 4.723955832933269, + "grad_norm": 0.5544441729503802, + "learning_rate": 9.412245681622578e-08, + "loss": 1.1566, + "step": 2461 + }, + { + "epoch": 4.725876140182429, + "grad_norm": 0.5527989429828987, + "learning_rate": 9.283271122898174e-08, + "loss": 1.2213, + "step": 2462 + }, + { + "epoch": 4.727796447431589, + "grad_norm": 0.5718533964178778, + "learning_rate": 9.155178049991442e-08, + "loss": 1.0717, + "step": 2463 + }, + { + "epoch": 4.729716754680749, + "grad_norm": 0.4885934571102279, + "learning_rate": 9.027966692999046e-08, + "loss": 1.1781, + "step": 2464 + }, + { + "epoch": 4.731637061929908, + "grad_norm": 0.558370907740342, + "learning_rate": 8.901637280433695e-08, + "loss": 1.391, + "step": 2465 + }, + { + "epoch": 4.733557369179069, + "grad_norm": 0.5440314217355111, + "learning_rate": 8.776190039223753e-08, + "loss": 1.155, + "step": 2466 + }, + { + "epoch": 4.7354776764282285, + "grad_norm": 0.5300365636614202, + "learning_rate": 8.651625194713076e-08, + "loss": 1.1979, + "step": 2467 + }, + { + "epoch": 4.737397983677388, + "grad_norm": 0.5307044395145768, + "learning_rate": 8.527942970660396e-08, + "loss": 1.2377, + "step": 2468 + }, + { + "epoch": 4.7393182909265485, + "grad_norm": 0.5578783636014477, + "learning_rate": 8.40514358923894e-08, + "loss": 1.0829, + "step": 2469 + }, + { + "epoch": 4.741238598175708, + "grad_norm": 0.5641546571850782, + "learning_rate": 8.283227271035976e-08, + "loss": 1.0492, + "step": 2470 + }, + { + "epoch": 4.743158905424868, + "grad_norm": 0.5264756400028668, + "learning_rate": 8.162194235052767e-08, + "loss": 1.1316, + "step": 2471 + }, + { + "epoch": 4.745079212674028, + "grad_norm": 0.4795614941279546, + "learning_rate": 8.042044698703676e-08, + "loss": 1.093, + "step": 2472 + }, + { + "epoch": 4.746999519923188, + "grad_norm": 0.5452223610153096, + "learning_rate": 7.922778877816062e-08, + "loss": 1.1561, + 
"step": 2473 + }, + { + "epoch": 4.748919827172347, + "grad_norm": 0.5516106514312045, + "learning_rate": 7.804396986629936e-08, + "loss": 1.2354, + "step": 2474 + }, + { + "epoch": 4.750840134421507, + "grad_norm": 0.5289182475654236, + "learning_rate": 7.686899237797418e-08, + "loss": 1.0805, + "step": 2475 + }, + { + "epoch": 4.7527604416706675, + "grad_norm": 0.6073886025587725, + "learning_rate": 7.570285842382396e-08, + "loss": 1.2077, + "step": 2476 + }, + { + "epoch": 4.754680748919827, + "grad_norm": 0.5689532420338969, + "learning_rate": 7.454557009860308e-08, + "loss": 1.0757, + "step": 2477 + }, + { + "epoch": 4.756601056168987, + "grad_norm": 0.5491207407988594, + "learning_rate": 7.339712948117416e-08, + "loss": 1.1028, + "step": 2478 + }, + { + "epoch": 4.758521363418147, + "grad_norm": 0.507006055333242, + "learning_rate": 7.225753863450813e-08, + "loss": 1.1865, + "step": 2479 + }, + { + "epoch": 4.760441670667307, + "grad_norm": 0.5109694123971513, + "learning_rate": 7.112679960567858e-08, + "loss": 1.0964, + "step": 2480 + }, + { + "epoch": 4.762361977916466, + "grad_norm": 0.5365809094328227, + "learning_rate": 7.000491442585855e-08, + "loss": 1.1805, + "step": 2481 + }, + { + "epoch": 4.764282285165627, + "grad_norm": 0.5371890407694033, + "learning_rate": 6.889188511031541e-08, + "loss": 1.2996, + "step": 2482 + }, + { + "epoch": 4.766202592414786, + "grad_norm": 0.5204142927128342, + "learning_rate": 6.778771365840986e-08, + "loss": 1.1391, + "step": 2483 + }, + { + "epoch": 4.768122899663946, + "grad_norm": 0.5252923468087959, + "learning_rate": 6.669240205359139e-08, + "loss": 1.2031, + "step": 2484 + }, + { + "epoch": 4.7700432069131065, + "grad_norm": 0.4993328746270183, + "learning_rate": 6.560595226339228e-08, + "loss": 1.2265, + "step": 2485 + }, + { + "epoch": 4.771963514162266, + "grad_norm": 0.4742871610685375, + "learning_rate": 6.452836623942859e-08, + "loss": 1.1204, + "step": 2486 + }, + { + "epoch": 4.773883821411426, + "grad_norm": 0.5333403102891235, + "learning_rate": 6.345964591739196e-08, + "loss": 1.1349, + "step": 2487 + }, + { + "epoch": 4.775804128660585, + "grad_norm": 0.5230144925484133, + "learning_rate": 6.239979321705003e-08, + "loss": 1.2259, + "step": 2488 + }, + { + "epoch": 4.777724435909746, + "grad_norm": 0.5460952967520551, + "learning_rate": 6.134881004224103e-08, + "loss": 1.1739, + "step": 2489 + }, + { + "epoch": 4.779644743158905, + "grad_norm": 0.5031290250235103, + "learning_rate": 6.030669828087033e-08, + "loss": 1.3077, + "step": 2490 + }, + { + "epoch": 4.781565050408066, + "grad_norm": 0.6044277964489503, + "learning_rate": 5.927345980490662e-08, + "loss": 1.3715, + "step": 2491 + }, + { + "epoch": 4.783485357657225, + "grad_norm": 0.4905711030834733, + "learning_rate": 5.8249096470380793e-08, + "loss": 1.1369, + "step": 2492 + }, + { + "epoch": 4.785405664906385, + "grad_norm": 0.5281502375797602, + "learning_rate": 5.7233610117379824e-08, + "loss": 1.3115, + "step": 2493 + }, + { + "epoch": 4.787325972155545, + "grad_norm": 0.5294866807062139, + "learning_rate": 5.622700257004676e-08, + "loss": 1.2158, + "step": 2494 + }, + { + "epoch": 4.789246279404705, + "grad_norm": 0.5659268260025048, + "learning_rate": 5.5229275636572434e-08, + "loss": 1.2302, + "step": 2495 + }, + { + "epoch": 4.791166586653865, + "grad_norm": 0.6979130255789551, + "learning_rate": 5.4240431109197075e-08, + "loss": 1.1575, + "step": 2496 + }, + { + "epoch": 4.793086893903024, + "grad_norm": 0.5322434698347602, + "learning_rate": 
5.326047076420593e-08, + "loss": 1.2836, + "step": 2497 + }, + { + "epoch": 4.795007201152185, + "grad_norm": 0.6247209603233667, + "learning_rate": 5.2289396361923096e-08, + "loss": 1.1448, + "step": 2498 + }, + { + "epoch": 4.796927508401344, + "grad_norm": 0.5284714261167396, + "learning_rate": 5.132720964671378e-08, + "loss": 1.3396, + "step": 2499 + }, + { + "epoch": 4.798847815650504, + "grad_norm": 0.5370592394236299, + "learning_rate": 5.0373912346974305e-08, + "loss": 1.105, + "step": 2500 + }, + { + "epoch": 4.8007681228996635, + "grad_norm": 0.5879826079596842, + "learning_rate": 4.9429506175135975e-08, + "loss": 1.1028, + "step": 2501 + }, + { + "epoch": 4.802688430148824, + "grad_norm": 0.536270690929121, + "learning_rate": 4.849399282765732e-08, + "loss": 1.2475, + "step": 2502 + }, + { + "epoch": 4.8046087373979836, + "grad_norm": 0.5047253510806747, + "learning_rate": 4.756737398502187e-08, + "loss": 1.1813, + "step": 2503 + }, + { + "epoch": 4.806529044647144, + "grad_norm": 0.5632614704656963, + "learning_rate": 4.66496513117376e-08, + "loss": 1.3188, + "step": 2504 + }, + { + "epoch": 4.808449351896304, + "grad_norm": 0.5251833663165217, + "learning_rate": 4.574082645632971e-08, + "loss": 1.0484, + "step": 2505 + }, + { + "epoch": 4.810369659145463, + "grad_norm": 0.5709038458523662, + "learning_rate": 4.484090105134231e-08, + "loss": 1.3097, + "step": 2506 + }, + { + "epoch": 4.812289966394623, + "grad_norm": 0.487937727516239, + "learning_rate": 4.3949876713332284e-08, + "loss": 1.1648, + "step": 2507 + }, + { + "epoch": 4.814210273643783, + "grad_norm": 0.4829432187234435, + "learning_rate": 4.3067755042866534e-08, + "loss": 1.2734, + "step": 2508 + }, + { + "epoch": 4.816130580892943, + "grad_norm": 0.5563760476108452, + "learning_rate": 4.219453762452086e-08, + "loss": 1.0875, + "step": 2509 + }, + { + "epoch": 4.8180508881421025, + "grad_norm": 0.5185623995973638, + "learning_rate": 4.133022602687664e-08, + "loss": 1.2505, + "step": 2510 + }, + { + "epoch": 4.819971195391263, + "grad_norm": 0.5765001045776501, + "learning_rate": 4.047482180251583e-08, + "loss": 1.2267, + "step": 2511 + }, + { + "epoch": 4.8218915026404225, + "grad_norm": 0.5540735312910531, + "learning_rate": 3.962832648802151e-08, + "loss": 1.2215, + "step": 2512 + }, + { + "epoch": 4.823811809889582, + "grad_norm": 0.5965501933340387, + "learning_rate": 3.879074160397178e-08, + "loss": 1.2566, + "step": 2513 + }, + { + "epoch": 4.825732117138742, + "grad_norm": 0.48878200858979615, + "learning_rate": 3.7962068654941454e-08, + "loss": 1.1532, + "step": 2514 + }, + { + "epoch": 4.827652424387902, + "grad_norm": 0.5226175470088895, + "learning_rate": 3.714230912949368e-08, + "loss": 1.1692, + "step": 2515 + }, + { + "epoch": 4.829572731637062, + "grad_norm": 0.4683045214558622, + "learning_rate": 3.6331464500181656e-08, + "loss": 1.0829, + "step": 2516 + }, + { + "epoch": 4.831493038886222, + "grad_norm": 0.5847024633183215, + "learning_rate": 3.552953622354471e-08, + "loss": 1.2024, + "step": 2517 + }, + { + "epoch": 4.833413346135382, + "grad_norm": 0.5298079209642154, + "learning_rate": 3.473652574010444e-08, + "loss": 1.2373, + "step": 2518 + }, + { + "epoch": 4.8353336533845415, + "grad_norm": 0.534008490640948, + "learning_rate": 3.395243447436469e-08, + "loss": 1.1287, + "step": 2519 + }, + { + "epoch": 4.837253960633701, + "grad_norm": 0.5567745345823741, + "learning_rate": 3.317726383480657e-08, + "loss": 1.2346, + "step": 2520 + }, + { + "epoch": 4.8391742678828615, + "grad_norm": 
0.535636717572194, + "learning_rate": 3.241101521388734e-08, + "loss": 1.1895, + "step": 2521 + }, + { + "epoch": 4.841094575132021, + "grad_norm": 0.5299103787312569, + "learning_rate": 3.165368998803597e-08, + "loss": 1.1591, + "step": 2522 + }, + { + "epoch": 4.843014882381181, + "grad_norm": 0.5001937342115388, + "learning_rate": 3.0905289517654816e-08, + "loss": 1.0912, + "step": 2523 + }, + { + "epoch": 4.844935189630341, + "grad_norm": 0.5788190073573825, + "learning_rate": 3.016581514711181e-08, + "loss": 1.1565, + "step": 2524 + }, + { + "epoch": 4.846855496879501, + "grad_norm": 0.550092301919898, + "learning_rate": 2.9435268204742185e-08, + "loss": 1.2451, + "step": 2525 + }, + { + "epoch": 4.84877580412866, + "grad_norm": 0.5020429205285256, + "learning_rate": 2.871365000284454e-08, + "loss": 1.313, + "step": 2526 + }, + { + "epoch": 4.85069611137782, + "grad_norm": 0.4877521237197705, + "learning_rate": 2.800096183767864e-08, + "loss": 1.2054, + "step": 2527 + }, + { + "epoch": 4.8526164186269805, + "grad_norm": 0.49930873636926976, + "learning_rate": 2.7297204989461536e-08, + "loss": 1.1671, + "step": 2528 + }, + { + "epoch": 4.85453672587614, + "grad_norm": 0.5568628938901399, + "learning_rate": 2.6602380722369203e-08, + "loss": 1.1926, + "step": 2529 + }, + { + "epoch": 4.8564570331253005, + "grad_norm": 0.556990010985446, + "learning_rate": 2.591649028453047e-08, + "loss": 1.2506, + "step": 2530 + }, + { + "epoch": 4.85837734037446, + "grad_norm": 0.46840636683679204, + "learning_rate": 2.523953490802533e-08, + "loss": 1.0517, + "step": 2531 + }, + { + "epoch": 4.86029764762362, + "grad_norm": 0.4868094986851968, + "learning_rate": 2.457151580888495e-08, + "loss": 1.3136, + "step": 2532 + }, + { + "epoch": 4.862217954872779, + "grad_norm": 0.6117100511752752, + "learning_rate": 2.391243418708722e-08, + "loss": 1.2088, + "step": 2533 + }, + { + "epoch": 4.86413826212194, + "grad_norm": 0.5468888316913659, + "learning_rate": 2.326229122655621e-08, + "loss": 1.1585, + "step": 2534 + }, + { + "epoch": 4.866058569371099, + "grad_norm": 0.4968476925484571, + "learning_rate": 2.2621088095158285e-08, + "loss": 1.1635, + "step": 2535 + }, + { + "epoch": 4.867978876620259, + "grad_norm": 0.5186381192381582, + "learning_rate": 2.1988825944702086e-08, + "loss": 1.0117, + "step": 2536 + }, + { + "epoch": 4.8698991838694194, + "grad_norm": 0.5149623594336047, + "learning_rate": 2.1365505910934114e-08, + "loss": 1.2117, + "step": 2537 + }, + { + "epoch": 4.871819491118579, + "grad_norm": 0.5557437974934595, + "learning_rate": 2.0751129113538715e-08, + "loss": 1.241, + "step": 2538 + }, + { + "epoch": 4.873739798367739, + "grad_norm": 0.5773902027486048, + "learning_rate": 2.0145696656135305e-08, + "loss": 1.2148, + "step": 2539 + }, + { + "epoch": 4.875660105616898, + "grad_norm": 0.5513445259552008, + "learning_rate": 1.9549209626276156e-08, + "loss": 1.1654, + "step": 2540 + }, + { + "epoch": 4.877580412866059, + "grad_norm": 0.5582860048200888, + "learning_rate": 1.8961669095444723e-08, + "loss": 1.2333, + "step": 2541 + }, + { + "epoch": 4.879500720115218, + "grad_norm": 0.5219994750065519, + "learning_rate": 1.8383076119053433e-08, + "loss": 1.0365, + "step": 2542 + }, + { + "epoch": 4.881421027364379, + "grad_norm": 0.6208211635835452, + "learning_rate": 1.7813431736442566e-08, + "loss": 1.3189, + "step": 2543 + }, + { + "epoch": 4.883341334613538, + "grad_norm": 0.5040691985983982, + "learning_rate": 1.7252736970877483e-08, + "loss": 1.1405, + "step": 2544 + }, + { + "epoch": 
4.885261641862698, + "grad_norm": 0.5984846899685675, + "learning_rate": 1.6700992829546957e-08, + "loss": 1.2403, + "step": 2545 + }, + { + "epoch": 4.8871819491118575, + "grad_norm": 0.5274058094748629, + "learning_rate": 1.615820030356208e-08, + "loss": 1.2578, + "step": 2546 + }, + { + "epoch": 4.889102256361018, + "grad_norm": 0.478242151918592, + "learning_rate": 1.5624360367953452e-08, + "loss": 1.2025, + "step": 2547 + }, + { + "epoch": 4.891022563610178, + "grad_norm": 0.5537751205817378, + "learning_rate": 1.509947398167011e-08, + "loss": 1.0504, + "step": 2548 + }, + { + "epoch": 4.892942870859337, + "grad_norm": 0.5237404832236813, + "learning_rate": 1.4583542087577285e-08, + "loss": 1.2877, + "step": 2549 + }, + { + "epoch": 4.894863178108498, + "grad_norm": 0.5196677442429782, + "learning_rate": 1.4076565612455851e-08, + "loss": 1.1147, + "step": 2550 + }, + { + "epoch": 4.896783485357657, + "grad_norm": 0.5889332980962352, + "learning_rate": 1.3578545466998994e-08, + "loss": 1.2361, + "step": 2551 + }, + { + "epoch": 4.898703792606817, + "grad_norm": 0.5868259894928146, + "learning_rate": 1.308948254581277e-08, + "loss": 1.2011, + "step": 2552 + }, + { + "epoch": 4.9006240998559765, + "grad_norm": 0.5850281381454991, + "learning_rate": 1.260937772741111e-08, + "loss": 1.1933, + "step": 2553 + }, + { + "epoch": 4.902544407105137, + "grad_norm": 0.6064319742386225, + "learning_rate": 1.2138231874217477e-08, + "loss": 1.4161, + "step": 2554 + }, + { + "epoch": 4.9044647143542965, + "grad_norm": 0.5461269816338711, + "learning_rate": 1.1676045832562654e-08, + "loss": 1.2192, + "step": 2555 + }, + { + "epoch": 4.906385021603457, + "grad_norm": 0.5237025996616081, + "learning_rate": 1.1222820432681969e-08, + "loss": 1.304, + "step": 2556 + }, + { + "epoch": 4.908305328852617, + "grad_norm": 0.5925637335022672, + "learning_rate": 1.0778556488714731e-08, + "loss": 1.2476, + "step": 2557 + }, + { + "epoch": 4.910225636101776, + "grad_norm": 0.5135515207188895, + "learning_rate": 1.0343254798702018e-08, + "loss": 1.395, + "step": 2558 + }, + { + "epoch": 4.912145943350936, + "grad_norm": 0.5658835108960275, + "learning_rate": 9.91691614458723e-09, + "loss": 1.1321, + "step": 2559 + }, + { + "epoch": 4.914066250600096, + "grad_norm": 0.6085575279148048, + "learning_rate": 9.499541292211645e-09, + "loss": 1.3523, + "step": 2560 + }, + { + "epoch": 4.915986557849256, + "grad_norm": 0.5455561221519407, + "learning_rate": 9.091130991315534e-09, + "loss": 1.2001, + "step": 2561 + }, + { + "epoch": 4.9179068650984155, + "grad_norm": 0.49845915610778063, + "learning_rate": 8.691685975535935e-09, + "loss": 1.1212, + "step": 2562 + }, + { + "epoch": 4.919827172347576, + "grad_norm": 0.5453802974271416, + "learning_rate": 8.301206962404994e-09, + "loss": 1.2335, + "step": 2563 + }, + { + "epoch": 4.9217474795967355, + "grad_norm": 0.5975964445218158, + "learning_rate": 7.919694653349408e-09, + "loss": 1.284, + "step": 2564 + }, + { + "epoch": 4.923667786845895, + "grad_norm": 0.5071504059287852, + "learning_rate": 7.547149733688197e-09, + "loss": 1.0975, + "step": 2565 + }, + { + "epoch": 4.925588094095055, + "grad_norm": 0.5434984146631677, + "learning_rate": 7.183572872632716e-09, + "loss": 1.2349, + "step": 2566 + }, + { + "epoch": 4.927508401344215, + "grad_norm": 0.5540063829002054, + "learning_rate": 6.828964723284426e-09, + "loss": 1.2744, + "step": 2567 + }, + { + "epoch": 4.929428708593375, + "grad_norm": 0.48222637060161555, + "learning_rate": 6.483325922634342e-09, + "loss": 1.1754, + 
"step": 2568 + }, + { + "epoch": 4.931349015842535, + "grad_norm": 0.534996064398238, + "learning_rate": 6.14665709156137e-09, + "loss": 1.3795, + "step": 2569 + }, + { + "epoch": 4.933269323091695, + "grad_norm": 0.5248460172062902, + "learning_rate": 5.81895883483119e-09, + "loss": 1.1282, + "step": 2570 + }, + { + "epoch": 4.9351896303408544, + "grad_norm": 0.5351557243988182, + "learning_rate": 5.5002317410962625e-09, + "loss": 1.0969, + "step": 2571 + }, + { + "epoch": 4.937109937590014, + "grad_norm": 0.5065120823324496, + "learning_rate": 5.190476382893051e-09, + "loss": 1.1707, + "step": 2572 + }, + { + "epoch": 4.9390302448391745, + "grad_norm": 0.5930725519575019, + "learning_rate": 4.889693316642019e-09, + "loss": 1.2825, + "step": 2573 + }, + { + "epoch": 4.940950552088334, + "grad_norm": 0.5538000630753094, + "learning_rate": 4.597883082647636e-09, + "loss": 1.3152, + "step": 2574 + }, + { + "epoch": 4.942870859337494, + "grad_norm": 0.5886256693713557, + "learning_rate": 4.315046205094486e-09, + "loss": 1.2099, + "step": 2575 + }, + { + "epoch": 4.944791166586654, + "grad_norm": 0.516723882797326, + "learning_rate": 4.041183192049492e-09, + "loss": 1.1326, + "step": 2576 + }, + { + "epoch": 4.946711473835814, + "grad_norm": 0.5159981323805621, + "learning_rate": 3.776294535459135e-09, + "loss": 1.1157, + "step": 2577 + }, + { + "epoch": 4.948631781084973, + "grad_norm": 0.5304977735261787, + "learning_rate": 3.5203807111489074e-09, + "loss": 1.2183, + "step": 2578 + }, + { + "epoch": 4.950552088334134, + "grad_norm": 0.5163733316908455, + "learning_rate": 3.27344217882275e-09, + "loss": 1.1979, + "step": 2579 + }, + { + "epoch": 4.952472395583293, + "grad_norm": 0.544708550617515, + "learning_rate": 3.0354793820625005e-09, + "loss": 1.0515, + "step": 2580 + }, + { + "epoch": 4.954392702832453, + "grad_norm": 0.5323309624897271, + "learning_rate": 2.806492748325118e-09, + "loss": 1.1812, + "step": 2581 + }, + { + "epoch": 4.9563130100816135, + "grad_norm": 0.48093006373654595, + "learning_rate": 2.5864826889454574e-09, + "loss": 1.0504, + "step": 2582 + }, + { + "epoch": 4.958233317330773, + "grad_norm": 0.49263185228622863, + "learning_rate": 2.3754495991329397e-09, + "loss": 1.0996, + "step": 2583 + }, + { + "epoch": 4.960153624579933, + "grad_norm": 0.5119029727625991, + "learning_rate": 2.1733938579698853e-09, + "loss": 1.0899, + "step": 2584 + }, + { + "epoch": 4.962073931829092, + "grad_norm": 0.4802436676099826, + "learning_rate": 1.9803158284154022e-09, + "loss": 1.2235, + "step": 2585 + }, + { + "epoch": 4.963994239078253, + "grad_norm": 0.5565803632409412, + "learning_rate": 1.796215857298722e-09, + "loss": 1.3373, + "step": 2586 + }, + { + "epoch": 4.965914546327412, + "grad_norm": 0.5089054657543083, + "learning_rate": 1.6210942753236424e-09, + "loss": 1.2333, + "step": 2587 + }, + { + "epoch": 4.967834853576572, + "grad_norm": 0.546037373064592, + "learning_rate": 1.454951397064641e-09, + "loss": 1.1356, + "step": 2588 + }, + { + "epoch": 4.969755160825732, + "grad_norm": 0.5407196766824607, + "learning_rate": 1.2977875209679858e-09, + "loss": 1.167, + "step": 2589 + }, + { + "epoch": 4.971675468074892, + "grad_norm": 0.586108957001937, + "learning_rate": 1.149602929351179e-09, + "loss": 1.1574, + "step": 2590 + }, + { + "epoch": 4.973595775324052, + "grad_norm": 0.5231532063241261, + "learning_rate": 1.0103978884018484e-09, + "loss": 1.093, + "step": 2591 + }, + { + "epoch": 4.975516082573212, + "grad_norm": 0.579774557614323, + "learning_rate": 
8.801726481766359e-10, + "loss": 1.1237, + "step": 2592 + }, + { + "epoch": 4.977436389822372, + "grad_norm": 0.5153265626277028, + "learning_rate": 7.589274426017535e-10, + "loss": 1.1715, + "step": 2593 + }, + { + "epoch": 4.979356697071531, + "grad_norm": 0.5339820587344012, + "learning_rate": 6.466624894740925e-10, + "loss": 1.2868, + "step": 2594 + }, + { + "epoch": 4.981277004320692, + "grad_norm": 0.516384026693114, + "learning_rate": 5.433779904567837e-10, + "loss": 1.2298, + "step": 2595 + }, + { + "epoch": 4.983197311569851, + "grad_norm": 0.534051090586025, + "learning_rate": 4.490741310819724e-10, + "loss": 1.3007, + "step": 2596 + }, + { + "epoch": 4.985117618819011, + "grad_norm": 0.5641462018825322, + "learning_rate": 3.637510807508182e-10, + "loss": 1.1394, + "step": 2597 + }, + { + "epoch": 4.9870379260681705, + "grad_norm": 0.5539209864636149, + "learning_rate": 2.8740899273071996e-10, + "loss": 1.1203, + "step": 2598 + }, + { + "epoch": 4.988958233317331, + "grad_norm": 0.5141437789075964, + "learning_rate": 2.2004800415642569e-10, + "loss": 1.1353, + "step": 2599 + }, + { + "epoch": 4.990878540566491, + "grad_norm": 0.4934499633470448, + "learning_rate": 1.6166823603058768e-10, + "loss": 1.216, + "step": 2600 + }, + { + "epoch": 4.99279884781565, + "grad_norm": 0.5235521487626001, + "learning_rate": 1.1226979322098707e-10, + "loss": 1.1884, + "step": 2601 + }, + { + "epoch": 4.994719155064811, + "grad_norm": 0.5418684288294026, + "learning_rate": 7.185276446441958e-11, + "loss": 1.1204, + "step": 2602 + }, + { + "epoch": 4.99663946231397, + "grad_norm": 0.493974428237323, + "learning_rate": 4.041722236280965e-11, + "loss": 1.2444, + "step": 2603 + }, + { + "epoch": 4.99855976956313, + "grad_norm": 0.529084792129618, + "learning_rate": 1.796322338376566e-11, + "loss": 1.2268, + "step": 2604 + }, + { + "epoch": 5.0, + "grad_norm": 0.529084792129618, + "learning_rate": 4.490807862800317e-12, + "loss": 1.0881, + "step": 2605 + } + ], + "logging_steps": 1, + "max_steps": 2605, + "num_input_tokens_seen": 0, + "num_train_epochs": 5, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 451881746563072.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}
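
Note for readers of this checkpoint: trainer_state.json is plain JSON, so the log_history above can be inspected without loading the model. The following is a minimal, stdlib-only sketch of one way to summarize it; the local path is an assumption based on the checkpoint directory name in this diff, and the 50-step trailing window is an arbitrary illustrative choice, not anything the Trainer itself defines.

#!/usr/bin/env python3
# Sketch: summarize a Hugging Face Trainer trainer_state.json (path is hypothetical).
import json
from statistics import mean

STATE_PATH = "checkpoint-2605/trainer_state.json"  # assumed local checkpoint layout

with open(STATE_PATH, "r", encoding="utf-8") as f:
    state = json.load(f)

# log_history is a list of per-step dicts: epoch, grad_norm, learning_rate, loss, step.
history = [h for h in state["log_history"] if "loss" in h]
last = history[-1]
tail = history[-50:]  # short trailing window to smooth the noisy per-step loss

print(f"global_step   : {state['global_step']} / {state['max_steps']}")
print(f"epochs        : {state['num_train_epochs']} (logged up to {last['epoch']:.2f})")
print(f"final loss    : {last['loss']:.4f}")
print(f"mean loss over last {len(tail)} steps: {mean(h['loss'] for h in tail):.4f}")
print(f"learning rate : {history[0]['learning_rate']:.3e} -> {last['learning_rate']:.3e}")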