{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.4840235588739972,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 25.625,
      "learning_rate": 6.180469715698394e-07,
      "loss": 1.7354,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 27.0,
      "learning_rate": 1.2360939431396788e-06,
      "loss": 1.7147,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 23.375,
      "learning_rate": 1.854140914709518e-06,
      "loss": 1.7215,
      "step": 30
    },
    {
      "epoch": 0.01,
      "grad_norm": 22.375,
      "learning_rate": 2.4721878862793575e-06,
      "loss": 1.6731,
      "step": 40
    },
    {
      "epoch": 0.02,
      "grad_norm": 18.875,
      "learning_rate": 3.090234857849197e-06,
      "loss": 1.6182,
      "step": 50
    },
    {
      "epoch": 0.02,
      "grad_norm": 15.3125,
      "learning_rate": 3.708281829419036e-06,
      "loss": 1.5128,
      "step": 60
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.9375,
      "learning_rate": 4.326328800988875e-06,
      "loss": 1.4061,
      "step": 70
    },
    {
      "epoch": 0.03,
      "grad_norm": 7.46875,
      "learning_rate": 4.944375772558715e-06,
      "loss": 1.332,
      "step": 80
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.46875,
      "learning_rate": 5.562422744128554e-06,
      "loss": 1.1765,
      "step": 90
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.25,
      "learning_rate": 6.180469715698394e-06,
      "loss": 1.165,
      "step": 100
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.140625,
      "learning_rate": 6.798516687268234e-06,
      "loss": 1.0473,
      "step": 110
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.390625,
      "learning_rate": 7.416563658838072e-06,
      "loss": 0.9959,
      "step": 120
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.93359375,
      "learning_rate": 8.034610630407912e-06,
      "loss": 1.0219,
      "step": 130
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.82421875,
      "learning_rate": 8.65265760197775e-06,
      "loss": 0.9495,
      "step": 140
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.6796875,
      "learning_rate": 9.27070457354759e-06,
      "loss": 0.9312,
      "step": 150
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.66796875,
      "learning_rate": 9.88875154511743e-06,
      "loss": 0.94,
      "step": 160
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.71484375,
      "learning_rate": 1.0506798516687269e-05,
      "loss": 0.9317,
      "step": 170
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.703125,
      "learning_rate": 1.1124845488257108e-05,
      "loss": 0.9579,
      "step": 180
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.59375,
      "learning_rate": 1.1742892459826947e-05,
      "loss": 0.8969,
      "step": 190
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.64453125,
      "learning_rate": 1.2360939431396788e-05,
      "loss": 0.9221,
      "step": 200
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.5859375,
      "learning_rate": 1.2978986402966625e-05,
      "loss": 0.8965,
      "step": 210
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.62890625,
      "learning_rate": 1.3597033374536467e-05,
      "loss": 0.9268,
      "step": 220
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.53515625,
      "learning_rate": 1.4215080346106304e-05,
      "loss": 0.9049,
      "step": 230
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.6171875,
      "learning_rate": 1.4833127317676143e-05,
      "loss": 0.9096,
      "step": 240
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.64453125,
      "learning_rate": 1.5451174289245984e-05,
      "loss": 0.8934,
      "step": 250
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.671875,
      "learning_rate": 1.6069221260815823e-05,
      "loss": 0.8824,
      "step": 260
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.84765625,
      "learning_rate": 1.6687268232385662e-05,
      "loss": 0.9045,
      "step": 270
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.5625,
      "learning_rate": 1.73053152039555e-05,
      "loss": 0.8949,
      "step": 280
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.625,
      "learning_rate": 1.792336217552534e-05,
      "loss": 0.8893,
      "step": 290
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.482421875,
      "learning_rate": 1.854140914709518e-05,
      "loss": 0.8906,
      "step": 300
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.54296875,
      "learning_rate": 1.915945611866502e-05,
      "loss": 0.8883,
      "step": 310
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.828125,
      "learning_rate": 1.977750309023486e-05,
      "loss": 0.8665,
      "step": 320
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.66015625,
      "learning_rate": 2.0395550061804696e-05,
      "loss": 0.8452,
      "step": 330
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.51953125,
      "learning_rate": 2.1013597033374538e-05,
      "loss": 0.8943,
      "step": 340
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.490234375,
      "learning_rate": 2.1631644004944377e-05,
      "loss": 0.8802,
      "step": 350
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.5234375,
      "learning_rate": 2.2249690976514216e-05,
      "loss": 0.8798,
      "step": 360
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.46484375,
      "learning_rate": 2.2867737948084055e-05,
      "loss": 0.8778,
      "step": 370
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.55859375,
      "learning_rate": 2.3485784919653894e-05,
      "loss": 0.8314,
      "step": 380
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.494140625,
      "learning_rate": 2.4103831891223736e-05,
      "loss": 0.8829,
      "step": 390
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.46484375,
      "learning_rate": 2.4721878862793575e-05,
      "loss": 0.8734,
      "step": 400
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.5078125,
      "learning_rate": 2.5339925834363414e-05,
      "loss": 0.8421,
      "step": 410
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.51953125,
      "learning_rate": 2.595797280593325e-05,
      "loss": 0.8636,
      "step": 420
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.51953125,
      "learning_rate": 2.6576019777503092e-05,
      "loss": 0.856,
      "step": 430
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.50390625,
      "learning_rate": 2.7194066749072934e-05,
      "loss": 0.8537,
      "step": 440
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.56640625,
      "learning_rate": 2.781211372064277e-05,
      "loss": 0.8738,
      "step": 450
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.47265625,
      "learning_rate": 2.843016069221261e-05,
      "loss": 0.8548,
      "step": 460
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.484375,
      "learning_rate": 2.904820766378245e-05,
      "loss": 0.8214,
      "step": 470
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.52734375,
      "learning_rate": 2.9666254635352287e-05,
      "loss": 0.8489,
      "step": 480
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.474609375,
      "learning_rate": 3.0284301606922126e-05,
      "loss": 0.8641,
      "step": 490
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.50390625,
      "learning_rate": 3.090234857849197e-05,
      "loss": 0.8443,
      "step": 500
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.546875,
      "learning_rate": 3.1520395550061804e-05,
      "loss": 0.9063,
      "step": 510
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.68359375,
      "learning_rate": 3.2138442521631646e-05,
      "loss": 0.8458,
      "step": 520
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.494140625,
      "learning_rate": 3.275648949320149e-05,
      "loss": 0.8552,
      "step": 530
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.5,
      "learning_rate": 3.3374536464771324e-05,
      "loss": 0.825,
      "step": 540
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.51171875,
      "learning_rate": 3.3992583436341166e-05,
      "loss": 0.8666,
      "step": 550
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.54296875,
      "learning_rate": 3.4610630407911e-05,
      "loss": 0.8252,
      "step": 560
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.69921875,
      "learning_rate": 3.522867737948084e-05,
      "loss": 0.86,
      "step": 570
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.4921875,
      "learning_rate": 3.584672435105068e-05,
      "loss": 0.8664,
      "step": 580
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.431640625,
      "learning_rate": 3.646477132262052e-05,
      "loss": 0.8137,
      "step": 590
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.5,
      "learning_rate": 3.708281829419036e-05,
      "loss": 0.8572,
      "step": 600
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.466796875,
      "learning_rate": 3.77008652657602e-05,
      "loss": 0.8617,
      "step": 610
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.46875,
      "learning_rate": 3.831891223733004e-05,
      "loss": 0.8579,
      "step": 620
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.451171875,
      "learning_rate": 3.893695920889988e-05,
      "loss": 0.8581,
      "step": 630
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.462890625,
      "learning_rate": 3.955500618046972e-05,
      "loss": 0.8633,
      "step": 640
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.478515625,
      "learning_rate": 4.0173053152039556e-05,
      "loss": 0.852,
      "step": 650
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.478515625,
      "learning_rate": 4.079110012360939e-05,
      "loss": 0.8598,
      "step": 660
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.61328125,
      "learning_rate": 4.1409147095179234e-05,
      "loss": 0.844,
      "step": 670
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.51953125,
      "learning_rate": 4.2027194066749076e-05,
      "loss": 0.834,
      "step": 680
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.4453125,
      "learning_rate": 4.264524103831891e-05,
      "loss": 0.8257,
      "step": 690
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.50390625,
      "learning_rate": 4.3263288009888754e-05,
      "loss": 0.8337,
      "step": 700
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.431640625,
      "learning_rate": 4.3881334981458596e-05,
      "loss": 0.819,
      "step": 710
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.455078125,
      "learning_rate": 4.449938195302843e-05,
      "loss": 0.8306,
      "step": 720
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.47265625,
      "learning_rate": 4.511742892459827e-05,
      "loss": 0.8521,
      "step": 730
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.54296875,
      "learning_rate": 4.573547589616811e-05,
      "loss": 0.8355,
      "step": 740
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.486328125,
      "learning_rate": 4.635352286773795e-05,
      "loss": 0.8552,
      "step": 750
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.455078125,
      "learning_rate": 4.697156983930779e-05,
      "loss": 0.814,
      "step": 760
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.4296875,
      "learning_rate": 4.758961681087763e-05,
      "loss": 0.832,
      "step": 770
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.447265625,
      "learning_rate": 4.820766378244747e-05,
      "loss": 0.8399,
      "step": 780
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.447265625,
      "learning_rate": 4.882571075401731e-05,
      "loss": 0.7969,
      "step": 790
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.4296875,
      "learning_rate": 4.944375772558715e-05,
      "loss": 0.8477,
      "step": 800
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.49609375,
      "learning_rate": 4.99999941211936e-05,
      "loss": 0.8282,
      "step": 810
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.5,
      "learning_rate": 4.999928866777183e-05,
      "loss": 0.8227,
      "step": 820
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.412109375,
      "learning_rate": 4.999740749108744e-05,
      "loss": 0.8077,
      "step": 830
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.4453125,
      "learning_rate": 4.99943506796127e-05,
      "loss": 0.8447,
      "step": 840
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.52734375,
      "learning_rate": 4.999011837711028e-05,
      "loss": 0.8327,
      "step": 850
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.427734375,
      "learning_rate": 4.998471078262649e-05,
      "loss": 0.8341,
      "step": 860
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.43359375,
      "learning_rate": 4.997812815048196e-05,
      "loss": 0.8469,
      "step": 870
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.453125,
      "learning_rate": 4.9970370790259646e-05,
      "loss": 0.8331,
      "step": 880
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.458984375,
      "learning_rate": 4.9961439066790275e-05,
      "loss": 0.8161,
      "step": 890
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.404296875,
      "learning_rate": 4.995133340013522e-05,
      "loss": 0.8255,
      "step": 900
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.4375,
      "learning_rate": 4.994005426556668e-05,
      "loss": 0.8595,
      "step": 910
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.44140625,
      "learning_rate": 4.99276021935454e-05,
      "loss": 0.8391,
      "step": 920
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.419921875,
      "learning_rate": 4.991397776969566e-05,
      "loss": 0.8364,
      "step": 930
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.439453125,
      "learning_rate": 4.989918163477778e-05,
      "loss": 0.8018,
      "step": 940
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.427734375,
      "learning_rate": 4.9883214484657957e-05,
      "loss": 0.8303,
      "step": 950
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.470703125,
      "learning_rate": 4.986607707027556e-05,
      "loss": 0.837,
      "step": 960
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.466796875,
      "learning_rate": 4.9847770197607784e-05,
      "loss": 0.8289,
      "step": 970
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.4453125,
      "learning_rate": 4.982829472763177e-05,
      "loss": 0.8273,
      "step": 980
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.470703125,
      "learning_rate": 4.9807651576284104e-05,
      "loss": 0.7904,
      "step": 990
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.458984375,
      "learning_rate": 4.9785841714417734e-05,
      "loss": 0.8293,
      "step": 1000
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.421875,
      "learning_rate": 4.976286616775634e-05,
      "loss": 0.8347,
      "step": 1010
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.439453125,
      "learning_rate": 4.973872601684604e-05,
      "loss": 0.8279,
      "step": 1020
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.470703125,
      "learning_rate": 4.971342239700462e-05,
      "loss": 0.8283,
      "step": 1030
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.474609375,
      "learning_rate": 4.9686956498268114e-05,
      "loss": 0.8311,
      "step": 1040
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.439453125,
      "learning_rate": 4.9659329565334854e-05,
      "loss": 0.8081,
      "step": 1050
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.41796875,
      "learning_rate": 4.963054289750693e-05,
      "loss": 0.8201,
      "step": 1060
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.4765625,
      "learning_rate": 4.960059784862905e-05,
      "loss": 0.8056,
      "step": 1070
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.55859375,
      "learning_rate": 4.956949582702492e-05,
      "loss": 0.8262,
      "step": 1080
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.4765625,
      "learning_rate": 4.953723829543095e-05,
      "loss": 0.8043,
      "step": 1090
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.412109375,
      "learning_rate": 4.950382677092754e-05,
      "loss": 0.8074,
      "step": 1100
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.41796875,
      "learning_rate": 4.946926282486766e-05,
      "loss": 0.8122,
      "step": 1110
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.4609375,
      "learning_rate": 4.943354808280298e-05,
      "loss": 0.7852,
      "step": 1120
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.447265625,
      "learning_rate": 4.9396684224407404e-05,
      "loss": 0.8119,
      "step": 1130
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.466796875,
      "learning_rate": 4.9358672983398105e-05,
      "loss": 0.8018,
      "step": 1140
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.390625,
      "learning_rate": 4.931951614745395e-05,
      "loss": 0.7945,
      "step": 1150
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.431640625,
      "learning_rate": 4.927921555813148e-05,
      "loss": 0.8152,
      "step": 1160
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.4453125,
      "learning_rate": 4.92377731107782e-05,
      "loss": 0.8117,
      "step": 1170
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.427734375,
      "learning_rate": 4.919519075444358e-05,
      "loss": 0.7931,
      "step": 1180
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.427734375,
      "learning_rate": 4.915147049178726e-05,
      "loss": 0.7958,
      "step": 1190
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.427734375,
      "learning_rate": 4.910661437898493e-05,
      "loss": 0.8202,
      "step": 1200
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.4453125,
      "learning_rate": 4.906062452563165e-05,
      "loss": 0.8097,
      "step": 1210
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.4453125,
      "learning_rate": 4.901350309464256e-05,
      "loss": 0.7959,
      "step": 1220
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.44140625,
      "learning_rate": 4.8965252302151234e-05,
      "loss": 0.7805,
      "step": 1230
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.443359375,
      "learning_rate": 4.8915874417405394e-05,
      "loss": 0.8031,
      "step": 1240
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.41015625,
      "learning_rate": 4.886537176266024e-05,
      "loss": 0.8371,
      "step": 1250
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.427734375,
      "learning_rate": 4.881374671306917e-05,
      "loss": 0.8063,
      "step": 1260
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.4609375,
      "learning_rate": 4.8761001696572166e-05,
      "loss": 0.7884,
      "step": 1270
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.40234375,
      "learning_rate": 4.87071391937815e-05,
      "loss": 0.7977,
      "step": 1280
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.4609375,
      "learning_rate": 4.865216173786517e-05,
      "loss": 0.8154,
      "step": 1290
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.50390625,
      "learning_rate": 4.859607191442768e-05,
      "loss": 0.8157,
      "step": 1300
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.435546875,
      "learning_rate": 4.853887236138851e-05,
      "loss": 0.792,
      "step": 1310
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.42578125,
      "learning_rate": 4.8480565768857995e-05,
      "loss": 0.8177,
      "step": 1320
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.43359375,
      "learning_rate": 4.842115487901086e-05,
      "loss": 0.8022,
      "step": 1330
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.41796875,
      "learning_rate": 4.8360642485957195e-05,
      "loss": 0.8069,
      "step": 1340
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.396484375,
      "learning_rate": 4.829903143561113e-05,
      "loss": 0.7789,
      "step": 1350
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.453125,
      "learning_rate": 4.823632462555691e-05,
      "loss": 0.7911,
      "step": 1360
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.46875,
      "learning_rate": 4.817252500491264e-05,
      "loss": 0.8172,
      "step": 1370
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.435546875,
      "learning_rate": 4.810763557419164e-05,
      "loss": 0.8421,
      "step": 1380
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.447265625,
      "learning_rate": 4.8041659385161255e-05,
      "loss": 0.8011,
      "step": 1390
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.5078125,
      "learning_rate": 4.7974599540699386e-05,
      "loss": 0.8185,
      "step": 1400
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.416015625,
      "learning_rate": 4.7906459194648545e-05,
      "loss": 0.7877,
      "step": 1410
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.462890625,
      "learning_rate": 4.783724155166751e-05,
      "loss": 0.7888,
      "step": 1420
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.423828125,
      "learning_rate": 4.7766949867080624e-05,
      "loss": 0.8004,
      "step": 1430
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.412109375,
      "learning_rate": 4.76955874467247e-05,
      "loss": 0.8045,
      "step": 1440
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.4921875,
      "learning_rate": 4.762315764679353e-05,
      "loss": 0.7865,
      "step": 1450
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.458984375,
      "learning_rate": 4.754966387368008e-05,
      "loss": 0.782,
      "step": 1460
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.4453125,
      "learning_rate": 4.747510958381623e-05,
      "loss": 0.7674,
      "step": 1470
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.41796875,
      "learning_rate": 4.739949828351028e-05,
      "loss": 0.805,
      "step": 1480
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.427734375,
      "learning_rate": 4.7322833528782e-05,
      "loss": 0.7571,
      "step": 1490
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.44921875,
      "learning_rate": 4.7245118925195374e-05,
      "loss": 0.8033,
      "step": 1500
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.447265625,
      "learning_rate": 4.716635812768911e-05,
      "loss": 0.821,
      "step": 1510
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.431640625,
      "learning_rate": 4.7086554840404676e-05,
      "loss": 0.8236,
      "step": 1520
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.451171875,
      "learning_rate": 4.7005712816512096e-05,
      "loss": 0.7947,
      "step": 1530
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.51953125,
      "learning_rate": 4.6923835858033495e-05,
      "loss": 0.7978,
      "step": 1540
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.4921875,
      "learning_rate": 4.684092781566422e-05,
      "loss": 0.7876,
      "step": 1550
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.408203125,
      "learning_rate": 4.6756992588591775e-05,
      "loss": 0.7836,
      "step": 1560
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.408203125,
      "learning_rate": 4.667203412431245e-05,
      "loss": 0.7749,
      "step": 1570
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.419921875,
      "learning_rate": 4.658605641844564e-05,
      "loss": 0.8152,
      "step": 1580
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.431640625,
      "learning_rate": 4.6499063514545934e-05,
      "loss": 0.7645,
      "step": 1590
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.447265625,
      "learning_rate": 4.6411059503913e-05,
      "loss": 0.7905,
      "step": 1600
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.44921875,
      "learning_rate": 4.63220485253991e-05,
      "loss": 0.7851,
      "step": 1610
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.435546875,
      "learning_rate": 4.623203476521445e-05,
      "loss": 0.809,
      "step": 1620
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.400390625,
      "learning_rate": 4.6141022456730395e-05,
      "loss": 0.7852,
      "step": 1630
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.44140625,
      "learning_rate": 4.604901588028024e-05,
      "loss": 0.7987,
      "step": 1640
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.48046875,
      "learning_rate": 4.5956019362958006e-05,
      "loss": 0.8158,
      "step": 1650
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.43359375,
      "learning_rate": 4.586203727841488e-05,
      "loss": 0.8109,
      "step": 1660
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.408203125,
      "learning_rate": 4.576707404665356e-05,
      "loss": 0.7697,
      "step": 1670
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.423828125,
      "learning_rate": 4.567113413382034e-05,
      "loss": 0.791,
      "step": 1680
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.43359375,
      "learning_rate": 4.5574222051995085e-05,
      "loss": 0.7941,
      "step": 1690
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.412109375,
      "learning_rate": 4.547634235897906e-05,
      "loss": 0.8203,
      "step": 1700
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.451171875,
      "learning_rate": 4.5377499658080525e-05,
      "loss": 0.8188,
      "step": 1710
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.42578125,
      "learning_rate": 4.527769859789825e-05,
      "loss": 0.8146,
      "step": 1720
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.392578125,
      "learning_rate": 4.5176943872102915e-05,
      "loss": 0.7621,
      "step": 1730
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.451171875,
      "learning_rate": 4.5075240219216336e-05,
      "loss": 0.801,
      "step": 1740
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.4765625,
      "learning_rate": 4.4972592422388634e-05,
      "loss": 0.7702,
      "step": 1750
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.390625,
      "learning_rate": 4.486900530917329e-05,
      "loss": 0.7888,
      "step": 1760
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.44921875,
      "learning_rate": 4.476448375130004e-05,
      "loss": 0.7873,
      "step": 1770
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.455078125,
      "learning_rate": 4.4659032664445856e-05,
      "loss": 0.7647,
      "step": 1780
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.455078125,
      "learning_rate": 4.4552657008003676e-05,
      "loss": 0.7822,
      "step": 1790
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.458984375,
      "learning_rate": 4.4445361784849195e-05,
      "loss": 0.7746,
      "step": 1800
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.458984375,
      "learning_rate": 4.43371520411056e-05,
      "loss": 0.8167,
      "step": 1810
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.431640625,
      "learning_rate": 4.42280328659062e-05,
      "loss": 0.8368,
      "step": 1820
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.427734375,
      "learning_rate": 4.4118009391155124e-05,
      "loss": 0.8029,
      "step": 1830
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.396484375,
      "learning_rate": 4.400708679128596e-05,
      "loss": 0.8033,
      "step": 1840
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.42578125,
      "learning_rate": 4.389527028301836e-05,
      "loss": 0.7797,
      "step": 1850
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.412109375,
      "learning_rate": 4.378256512511277e-05,
      "loss": 0.8037,
      "step": 1860
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.4921875,
      "learning_rate": 4.366897661812304e-05,
      "loss": 0.8031,
      "step": 1870
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.46875,
      "learning_rate": 4.355451010414716e-05,
      "loss": 0.7943,
      "step": 1880
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.41796875,
      "learning_rate": 4.343917096657606e-05,
      "loss": 0.7925,
      "step": 1890
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.427734375,
      "learning_rate": 4.3322964629840344e-05,
      "loss": 0.7984,
      "step": 1900
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.4453125,
      "learning_rate": 4.320589655915527e-05,
      "loss": 0.8129,
      "step": 1910
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.427734375,
      "learning_rate": 4.308797226026364e-05,
      "loss": 0.7955,
      "step": 1920
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.42578125,
      "learning_rate": 4.296919727917692e-05,
      "loss": 0.8326,
      "step": 1930
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.439453125,
      "learning_rate": 4.284957720191438e-05,
      "loss": 0.8345,
      "step": 1940
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.3828125,
      "learning_rate": 4.272911765424039e-05,
      "loss": 0.7972,
      "step": 1950
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.412109375,
      "learning_rate": 4.260782430139984e-05,
      "loss": 0.7851,
      "step": 1960
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.44140625,
      "learning_rate": 4.248570284785172e-05,
      "loss": 0.8103,
      "step": 1970
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.466796875,
      "learning_rate": 4.236275903700078e-05,
      "loss": 0.8216,
      "step": 1980
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.431640625,
      "learning_rate": 4.223899865092749e-05,
      "loss": 0.8012,
      "step": 1990
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.443359375,
      "learning_rate": 4.2114427510116036e-05,
      "loss": 0.7701,
      "step": 2000
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.44921875,
      "learning_rate": 4.198905147318065e-05,
      "loss": 0.7926,
      "step": 2010
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.43359375,
      "learning_rate": 4.186287643659005e-05,
      "loss": 0.8113,
      "step": 2020
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.423828125,
      "learning_rate": 4.1735908334390085e-05,
      "loss": 0.8093,
      "step": 2030
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.3828125,
      "learning_rate": 4.1608153137924723e-05,
      "loss": 0.7778,
      "step": 2040
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.427734375,
      "learning_rate": 4.147961685555517e-05,
      "loss": 0.8171,
      "step": 2050
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.43359375,
      "learning_rate": 4.1350305532377334e-05,
      "loss": 0.8025,
      "step": 2060
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.404296875,
      "learning_rate": 4.122022524993747e-05,
      "loss": 0.7875,
      "step": 2070
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.41015625,
      "learning_rate": 4.108938212594622e-05,
      "loss": 0.7867,
      "step": 2080
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.4375,
      "learning_rate": 4.095778231399086e-05,
      "loss": 0.7849,
      "step": 2090
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.470703125,
      "learning_rate": 4.08254320032459e-05,
      "loss": 0.8028,
      "step": 2100
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.44921875,
      "learning_rate": 4.0692337418182014e-05,
      "loss": 0.8001,
      "step": 2110
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.474609375,
      "learning_rate": 4.055850481827329e-05,
      "loss": 0.8469,
      "step": 2120
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.43359375,
      "learning_rate": 4.042394049770286e-05,
      "loss": 0.7858,
      "step": 2130
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.400390625,
      "learning_rate": 4.0288650785066886e-05,
      "loss": 0.8021,
      "step": 2140
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.4453125,
      "learning_rate": 4.0152642043076884e-05,
      "loss": 0.8165,
      "step": 2150
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.43359375,
      "learning_rate": 4.0015920668260544e-05,
      "loss": 0.7834,
      "step": 2160
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.435546875,
      "learning_rate": 3.987849309066085e-05,
      "loss": 0.8018,
      "step": 2170
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.3984375,
      "learning_rate": 3.9740365773533704e-05,
      "loss": 0.7935,
      "step": 2180
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.4453125,
      "learning_rate": 3.960154521304394e-05,
      "loss": 0.7974,
      "step": 2190
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.439453125,
      "learning_rate": 3.946203793795982e-05,
      "loss": 0.7654,
      "step": 2200
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.4375,
      "learning_rate": 3.9321850509345946e-05,
      "loss": 0.7982,
      "step": 2210
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.458984375,
      "learning_rate": 3.9180989520254785e-05,
      "loss": 0.8053,
      "step": 2220
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.42578125,
      "learning_rate": 3.903946159541647e-05,
      "loss": 0.8066,
      "step": 2230
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.447265625,
      "learning_rate": 3.889727339092736e-05,
      "loss": 0.8322,
      "step": 2240
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.453125,
      "learning_rate": 3.875443159393689e-05,
      "loss": 0.7522,
      "step": 2250
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.48046875,
      "learning_rate": 3.861094292233316e-05,
      "loss": 0.8174,
      "step": 2260
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.455078125,
      "learning_rate": 3.8466814124426945e-05,
      "loss": 0.8254,
      "step": 2270
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.419921875,
      "learning_rate": 3.8322051978634325e-05,
      "loss": 0.7808,
      "step": 2280
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.42578125,
      "learning_rate": 3.817666329315792e-05,
      "loss": 0.7936,
      "step": 2290
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.451171875,
      "learning_rate": 3.803065490566667e-05,
      "loss": 0.8087,
      "step": 2300
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.4296875,
      "learning_rate": 3.788403368297426e-05,
      "loss": 0.8025,
      "step": 2310
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.453125,
      "learning_rate": 3.773680652071619e-05,
      "loss": 0.8164,
      "step": 2320
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.447265625,
      "learning_rate": 3.758898034302547e-05,
      "loss": 0.8024,
      "step": 2330
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.439453125,
      "learning_rate": 3.7440562102206924e-05,
      "loss": 0.7758,
      "step": 2340
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.419921875,
      "learning_rate": 3.7291558778410314e-05,
      "loss": 0.7855,
      "step": 2350
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.423828125,
      "learning_rate": 3.714197737930199e-05,
      "loss": 0.7729,
      "step": 2360
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.400390625,
      "learning_rate": 3.699182493973532e-05,
      "loss": 0.8203,
      "step": 2370
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.453125,
      "learning_rate": 3.684110852141991e-05,
      "loss": 0.8143,
      "step": 2380
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.44921875,
      "learning_rate": 3.668983521258938e-05,
      "loss": 0.7886,
      "step": 2390
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.470703125,
      "learning_rate": 3.65380121276681e-05,
      "loss": 0.804,
      "step": 2400
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.43359375,
      "learning_rate": 3.638564640693654e-05,
      "loss": 0.7876,
      "step": 2410
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.478515625,
      "learning_rate": 3.623274521619549e-05,
      "loss": 0.8019,
      "step": 2420
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.4140625,
      "learning_rate": 3.607931574642902e-05,
      "loss": 0.8213,
      "step": 2430
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.43359375,
      "learning_rate": 3.5925365213466314e-05,
      "loss": 0.7727,
      "step": 2440
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.451171875,
      "learning_rate": 3.5770900857642306e-05,
      "loss": 0.8027,
      "step": 2450
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.44140625,
      "learning_rate": 3.561592994345715e-05,
      "loss": 0.7515,
      "step": 2460
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.431640625,
      "learning_rate": 3.546045975923458e-05,
      "loss": 0.7791,
      "step": 2470
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.423828125,
      "learning_rate": 3.530449761677911e-05,
      "loss": 0.7931,
      "step": 2480
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.484375,
      "learning_rate": 3.514805085103222e-05,
      "loss": 0.8273,
      "step": 2490
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.4453125,
      "learning_rate": 3.499112681972734e-05,
      "loss": 0.7935,
      "step": 2500
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.41796875,
      "learning_rate": 3.4833732903043806e-05,
      "loss": 0.8248,
      "step": 2510
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.470703125,
      "learning_rate": 3.467587650325981e-05,
      "loss": 0.8168,
      "step": 2520
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.416015625,
      "learning_rate": 3.4517565044404266e-05,
      "loss": 0.8103,
      "step": 2530
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.421875,
      "learning_rate": 3.4358805971907593e-05,
      "loss": 0.8084,
      "step": 2540
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.412109375,
      "learning_rate": 3.4199606752251634e-05,
      "loss": 0.8073,
      "step": 2550
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.41796875,
      "learning_rate": 3.403997487261846e-05,
      "loss": 0.7934,
      "step": 2560
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.427734375,
      "learning_rate": 3.3879917840538265e-05,
      "loss": 0.7868,
      "step": 2570
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.45703125,
      "learning_rate": 3.371944318353626e-05,
      "loss": 0.8131,
      "step": 2580
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.408203125,
      "learning_rate": 3.355855844877869e-05,
      "loss": 0.8016,
      "step": 2590
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.51171875,
      "learning_rate": 3.3397271202717834e-05,
      "loss": 0.784,
      "step": 2600
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.427734375,
      "learning_rate": 3.323558903073623e-05,
      "loss": 0.795,
      "step": 2610
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.435546875,
      "learning_rate": 3.307351953678986e-05,
      "loss": 0.7958,
      "step": 2620
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.466796875,
      "learning_rate": 3.2911070343050555e-05,
      "loss": 0.7914,
      "step": 2630
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.41796875,
      "learning_rate": 3.274824908954756e-05,
      "loss": 0.7871,
      "step": 2640
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.458984375,
      "learning_rate": 3.258506343380815e-05,
      "loss": 0.785,
      "step": 2650
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.412109375,
      "learning_rate": 3.242152105049758e-05,
      "loss": 0.8188,
      "step": 2660
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.427734375,
      "learning_rate": 3.2257629631058066e-05,
      "loss": 0.7935,
      "step": 2670
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.466796875,
      "learning_rate": 3.20933968833471e-05,
      "loss": 0.8115,
      "step": 2680
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.43359375,
      "learning_rate": 3.1928830531274935e-05,
      "loss": 0.8129,
      "step": 2690
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.392578125,
      "learning_rate": 3.176393831444131e-05,
      "loss": 0.7732,
      "step": 2700
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.435546875,
      "learning_rate": 3.159872798777149e-05,
      "loss": 0.794,
      "step": 2710
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.421875,
      "learning_rate": 3.143320732115153e-05,
      "loss": 0.7794,
      "step": 2720
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.40234375,
      "learning_rate": 3.126738409906284e-05,
      "loss": 0.7922,
      "step": 2730
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.451171875,
      "learning_rate": 3.110126612021613e-05,
      "loss": 0.7881,
      "step": 2740
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.427734375,
      "learning_rate": 3.093486119718455e-05,
      "loss": 0.7701,
      "step": 2750
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.4140625,
      "learning_rate": 3.076817715603634e-05,
      "loss": 0.7826,
      "step": 2760
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.451171875,
      "learning_rate": 3.060122183596676e-05,
      "loss": 0.7625,
      "step": 2770
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.435546875,
      "learning_rate": 3.0434003088929354e-05,
      "loss": 0.7932,
      "step": 2780
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.40625,
      "learning_rate": 3.0266528779266722e-05,
      "loss": 0.7694,
      "step": 2790
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.453125,
      "learning_rate": 3.0098806783340644e-05,
      "loss": 0.7738,
      "step": 2800
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.4140625,
      "learning_rate": 2.993084498916165e-05,
      "loss": 0.7805,
      "step": 2810
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.43359375,
      "learning_rate": 2.9762651296018047e-05,
      "loss": 0.7879,
      "step": 2820
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.435546875,
      "learning_rate": 2.9594233614104412e-05,
      "loss": 0.814,
      "step": 2830
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.3984375,
      "learning_rate": 2.942559986414957e-05,
      "loss": 0.789,
      "step": 2840
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.439453125,
      "learning_rate": 2.925675797704411e-05,
      "loss": 0.815,
      "step": 2850
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.388671875,
      "learning_rate": 2.908771589346731e-05,
      "loss": 0.7808,
      "step": 2860
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.447265625,
      "learning_rate": 2.89184815635138e-05,
      "loss": 0.7706,
      "step": 2870
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.427734375,
      "learning_rate": 2.8749062946319576e-05,
      "loss": 0.7939,
      "step": 2880
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.419921875,
      "learning_rate": 2.857946800968773e-05,
      "loss": 0.7898,
      "step": 2890
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.51953125,
      "learning_rate": 2.8409704729713694e-05,
      "loss": 0.8139,
      "step": 2900
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.431640625,
      "learning_rate": 2.8239781090410133e-05,
      "loss": 0.8046,
      "step": 2910
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.4765625,
      "learning_rate": 2.806970508333146e-05,
      "loss": 0.7808,
      "step": 2920
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.4296875,
      "learning_rate": 2.789948470719798e-05,
      "loss": 0.7892,
      "step": 2930
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.416015625,
      "learning_rate": 2.772912796751972e-05,
      "loss": 0.7499,
      "step": 2940
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.474609375,
      "learning_rate": 2.755864287621992e-05,
      "loss": 0.7509,
      "step": 2950
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.435546875,
      "learning_rate": 2.738803745125821e-05,
      "loss": 0.8001,
      "step": 2960
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.396484375,
      "learning_rate": 2.7217319716253574e-05,
      "loss": 0.7911,
      "step": 2970
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.4296875,
      "learning_rate": 2.704649770010696e-05,
      "loss": 0.8088,
      "step": 2980
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.416015625,
      "learning_rate": 2.6875579436623678e-05,
      "loss": 0.8155,
      "step": 2990
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.431640625,
      "learning_rate": 2.6704572964135576e-05,
      "loss": 0.7625,
      "step": 3000
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.416015625,
      "learning_rate": 2.6533486325123007e-05,
      "loss": 0.7729,
      "step": 3010
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.4140625,
      "learning_rate": 2.636232756583657e-05,
      "loss": 0.7772,
      "step": 3020
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.431640625,
      "learning_rate": 2.6191104735918686e-05,
      "loss": 0.7969,
      "step": 3030
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.451171875,
      "learning_rate": 2.601982588802507e-05,
      "loss": 0.8003,
      "step": 3040
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.44921875,
      "learning_rate": 2.5848499077445932e-05,
      "loss": 0.8036,
      "step": 3050
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.44921875,
      "learning_rate": 2.5677132361727225e-05,
      "loss": 0.7889,
      "step": 3060
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.40234375,
      "learning_rate": 2.5505733800291622e-05,
      "loss": 0.7891,
      "step": 3070
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.42578125,
      "learning_rate": 2.5334311454059505e-05,
      "loss": 0.7805,
      "step": 3080
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.416015625,
      "learning_rate": 2.516287338506989e-05,
      "loss": 0.8171,
      "step": 3090
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.41015625,
      "learning_rate": 2.499142765610122e-05,
      "loss": 0.7768,
      "step": 3100
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.40234375,
      "learning_rate": 2.4819982330292184e-05,
      "loss": 0.822,
      "step": 3110
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.4375,
      "learning_rate": 2.4648545470762517e-05,
      "loss": 0.7701,
      "step": 3120
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.443359375,
      "learning_rate": 2.4477125140233787e-05,
      "loss": 0.7995,
      "step": 3130
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.4609375,
      "learning_rate": 2.4305729400650192e-05,
      "loss": 0.7826,
      "step": 3140
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.4296875,
      "learning_rate": 2.4134366312799412e-05,
      "loss": 0.7764,
      "step": 3150
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.396484375,
      "learning_rate": 2.396304393593351e-05,
      "loss": 0.793,
      "step": 3160
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.423828125,
      "learning_rate": 2.37917703273899e-05,
      "loss": 0.7768,
      "step": 3170
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.431640625,
      "learning_rate": 2.362055354221241e-05,
      "loss": 0.778,
      "step": 3180
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.4140625,
      "learning_rate": 2.3449401632772445e-05,
      "loss": 0.8069,
      "step": 3190
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.423828125,
      "learning_rate": 2.3278322648390298e-05,
      "loss": 0.7816,
      "step": 3200
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.4453125,
      "learning_rate": 2.3107324634956552e-05,
      "loss": 0.7878,
      "step": 3210
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.42578125,
      "learning_rate": 2.2936415634553727e-05,
      "loss": 0.8075,
      "step": 3220
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.4296875,
      "learning_rate": 2.276560368507803e-05,
      "loss": 0.7748,
      "step": 3230
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.435546875,
      "learning_rate": 2.2594896819861345e-05,
      "loss": 0.7655,
      "step": 3240
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.41796875,
      "learning_rate": 2.2424303067293396e-05,
      "loss": 0.7799,
      "step": 3250
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.439453125,
      "learning_rate": 2.2253830450444202e-05,
      "loss": 0.7805,
      "step": 3260
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.4453125,
      "learning_rate": 2.208348698668674e-05,
      "loss": 0.7737,
      "step": 3270
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.431640625,
      "learning_rate": 2.191328068731987e-05,
      "loss": 0.816,
      "step": 3280
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.408203125,
      "learning_rate": 2.1743219557191585e-05,
      "loss": 0.7772,
      "step": 3290
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.400390625,
      "learning_rate": 2.157331159432253e-05,
      "loss": 0.7636,
      "step": 3300
    },
    {
      "epoch": 1.23,
      "grad_norm": 0.408203125,
      "learning_rate": 2.1403564789529833e-05,
      "loss": 0.7981,
      "step": 3310
    },
    {
      "epoch": 1.23,
      "grad_norm": 0.3671875,
      "learning_rate": 2.123398712605134e-05,
      "loss": 0.7901,
      "step": 3320
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.40234375,
      "learning_rate": 2.1064586579170124e-05,
      "loss": 0.7919,
      "step": 3330
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.41796875,
      "learning_rate": 2.0895371115839413e-05,
      "loss": 0.7885,
      "step": 3340
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.45703125,
      "learning_rate": 2.0726348694307916e-05,
      "loss": 0.7947,
      "step": 3350
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.412109375,
      "learning_rate": 2.0557527263745522e-05,
      "loss": 0.8202,
      "step": 3360
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.419921875,
      "learning_rate": 2.038891476386948e-05,
      "loss": 0.8103,
      "step": 3370
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.423828125,
      "learning_rate": 2.0220519124570946e-05,
      "loss": 0.7949,
      "step": 3380
    },
    {
      "epoch": 1.26,
      "grad_norm": 0.42578125,
      "learning_rate": 2.0052348265542087e-05,
      "loss": 0.7849,
      "step": 3390
    },
    {
      "epoch": 1.26,
      "grad_norm": 0.3828125,
      "learning_rate": 1.9884410095903587e-05,
      "loss": 0.7817,
      "step": 3400
    },
    {
      "epoch": 1.27,
      "grad_norm": 0.39453125,
      "learning_rate": 1.971671251383268e-05,
      "loss": 0.8121,
      "step": 3410
    },
    {
      "epoch": 1.27,
      "grad_norm": 0.4140625,
      "learning_rate": 1.9549263406191707e-05,
      "loss": 0.787,
      "step": 3420
    },
    {
      "epoch": 1.27,
      "grad_norm": 0.42578125,
      "learning_rate": 1.9382070648157187e-05,
      "loss": 0.7708,
      "step": 3430
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.44140625,
      "learning_rate": 1.921514210284945e-05,
      "loss": 0.779,
      "step": 3440
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.423828125,
      "learning_rate": 1.9048485620962833e-05,
      "loss": 0.7638,
      "step": 3450
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.431640625,
      "learning_rate": 1.8882109040396456e-05,
      "loss": 0.7859,
      "step": 3460
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.41015625,
      "learning_rate": 1.8716020185885598e-05,
      "loss": 0.7698,
      "step": 3470
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.37890625,
      "learning_rate": 1.855022686863372e-05,
      "loss": 0.7937,
      "step": 3480
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.39453125,
      "learning_rate": 1.838473688594506e-05,
      "loss": 0.7875,
      "step": 3490
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.416015625,
      "learning_rate": 1.821955802085798e-05,
      "loss": 0.8009,
      "step": 3500
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.404296875,
      "learning_rate": 1.8054698041778877e-05,
      "loss": 0.7664,
      "step": 3510
    },
    {
      "epoch": 1.31,
      "grad_norm": 0.3828125,
      "learning_rate": 1.7890164702116867e-05,
      "loss": 0.7846,
      "step": 3520
    },
    {
      "epoch": 1.31,
      "grad_norm": 0.435546875,
      "learning_rate": 1.7725965739919113e-05,
      "loss": 0.7939,
      "step": 3530
    },
    {
      "epoch": 1.31,
      "grad_norm": 0.39453125,
      "learning_rate": 1.756210887750692e-05,
      "loss": 0.7927,
      "step": 3540
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.4140625,
      "learning_rate": 1.7398601821112552e-05,
      "loss": 0.8056,
      "step": 3550
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.447265625,
      "learning_rate": 1.7235452260516804e-05,
      "loss": 0.8037,
      "step": 3560
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.41796875,
      "learning_rate": 1.7072667868687348e-05,
      "loss": 0.7432,
      "step": 3570
    },
    {
      "epoch": 1.33,
      "grad_norm": 0.421875,
      "learning_rate": 1.6910256301417856e-05,
      "loss": 0.7778,
      "step": 3580
    },
    {
      "epoch": 1.33,
      "grad_norm": 0.369140625,
      "learning_rate": 1.6748225196967983e-05,
      "loss": 0.7727,
      "step": 3590
    },
    {
      "epoch": 1.34,
      "grad_norm": 0.41015625,
      "learning_rate": 1.6586582175704092e-05,
      "loss": 0.7997,
      "step": 3600
    },
    {
      "epoch": 1.34,
      "grad_norm": 0.4375,
      "learning_rate": 1.6425334839740915e-05,
      "loss": 0.7967,
      "step": 3610
    },
    {
      "epoch": 1.34,
      "grad_norm": 0.3984375,
      "learning_rate": 1.6264490772583985e-05,
      "loss": 0.8328,
      "step": 3620
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.42578125,
      "learning_rate": 1.6104057538772975e-05,
      "loss": 0.8032,
      "step": 3630
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.416015625,
      "learning_rate": 1.594404268352599e-05,
      "loss": 0.7699,
      "step": 3640
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.369140625,
      "learning_rate": 1.5784453732384652e-05,
      "loss": 0.8065,
      "step": 3650
    },
    {
      "epoch": 1.36,
      "grad_norm": 0.42578125,
      "learning_rate": 1.562529819086023e-05,
      "loss": 0.7946,
      "step": 3660
    },
    {
      "epoch": 1.36,
      "grad_norm": 0.423828125,
      "learning_rate": 1.5466583544080587e-05,
      "loss": 0.7898,
      "step": 3670
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.49609375,
      "learning_rate": 1.5308317256438205e-05,
      "loss": 0.7733,
      "step": 3680
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.48046875,
      "learning_rate": 1.5150506771239114e-05,
      "loss": 0.7995,
      "step": 3690
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.423828125,
      "learning_rate": 1.4993159510352836e-05,
      "loss": 0.7843,
      "step": 3700
    },
    {
      "epoch": 1.38,
      "grad_norm": 0.41796875,
      "learning_rate": 1.4836282873863319e-05,
      "loss": 0.7567,
      "step": 3710
    },
    {
      "epoch": 1.38,
      "grad_norm": 0.427734375,
      "learning_rate": 1.4679884239720928e-05,
      "loss": 0.7699,
      "step": 3720
    },
    {
      "epoch": 1.38,
      "grad_norm": 0.421875,
      "learning_rate": 1.452397096339545e-05,
      "loss": 0.7625,
      "step": 3730
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.447265625,
      "learning_rate": 1.436855037753016e-05,
      "loss": 0.764,
      "step": 3740
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.46875,
      "learning_rate": 1.421362979159695e-05,
      "loss": 0.7931,
      "step": 3750
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.44921875,
      "learning_rate": 1.4059216491552621e-05,
      "loss": 0.7986,
      "step": 3760
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.423828125,
      "learning_rate": 1.3905317739496143e-05,
      "loss": 0.791,
      "step": 3770
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.4375,
      "learning_rate": 1.3751940773327194e-05,
      "loss": 0.7896,
      "step": 3780
    },
    {
      "epoch": 1.41,
      "grad_norm": 0.439453125,
      "learning_rate": 1.3599092806405677e-05,
      "loss": 0.77,
      "step": 3790
    },
    {
      "epoch": 1.41,
      "grad_norm": 0.404296875,
      "learning_rate": 1.3446781027212563e-05,
      "loss": 0.8162,
      "step": 3800
    },
    {
      "epoch": 1.41,
      "grad_norm": 0.431640625,
      "learning_rate": 1.329501259901173e-05,
      "loss": 0.7702,
      "step": 3810
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.435546875,
      "learning_rate": 1.3143794659513153e-05,
      "loss": 0.7743,
      "step": 3820
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.43359375,
      "learning_rate": 1.299313432053713e-05,
      "loss": 0.7876,
      "step": 3830
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.44140625,
      "learning_rate": 1.2843038667679905e-05,
      "loss": 0.7682,
      "step": 3840
    },
    {
      "epoch": 1.43,
      "grad_norm": 0.431640625,
      "learning_rate": 1.2693514759980345e-05,
      "loss": 0.7882,
      "step": 3850
    },
    {
      "epoch": 1.43,
      "grad_norm": 0.435546875,
      "learning_rate": 1.2544569629587996e-05,
      "loss": 0.7782,
      "step": 3860
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.4296875,
      "learning_rate": 1.2396210281432374e-05,
      "loss": 0.8012,
      "step": 3870
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.447265625,
      "learning_rate": 1.2248443692893463e-05,
      "loss": 0.8027,
      "step": 3880
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.42578125,
      "learning_rate": 1.2101276813473642e-05,
      "loss": 0.7991,
      "step": 3890
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.458984375,
      "learning_rate": 1.1954716564470772e-05,
      "loss": 0.784,
      "step": 3900
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.388671875,
      "learning_rate": 1.1808769838652755e-05,
      "loss": 0.7678,
      "step": 3910
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.4140625,
      "learning_rate": 1.1663443499933303e-05,
      "loss": 0.8044,
      "step": 3920
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.43359375,
      "learning_rate": 1.1518744383049187e-05,
      "loss": 0.7707,
      "step": 3930
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.421875,
      "learning_rate": 1.1374679293238733e-05,
      "loss": 0.7842,
      "step": 3940
    },
    {
      "epoch": 1.47,
      "grad_norm": 0.4453125,
      "learning_rate": 1.1231255005921845e-05,
      "loss": 0.7971,
      "step": 3950
    },
    {
      "epoch": 1.47,
      "grad_norm": 0.388671875,
      "learning_rate": 1.1088478266381258e-05,
      "loss": 0.7996,
      "step": 3960
    },
    {
      "epoch": 1.47,
      "grad_norm": 0.423828125,
      "learning_rate": 1.094635578944541e-05,
      "loss": 0.7945,
      "step": 3970
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.421875,
      "learning_rate": 1.0804894259172579e-05,
      "loss": 0.8138,
      "step": 3980
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.427734375,
      "learning_rate": 1.0664100328536525e-05,
      "loss": 0.8029,
      "step": 3990
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.412109375,
      "learning_rate": 1.0523980619113655e-05,
      "loss": 0.8133,
      "step": 4000
    }
  ],
  "logging_steps": 10,
  "max_steps": 5390,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 200,
  "total_flos": 4.738673495402086e+19,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}