{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 0,
  "global_step": 340,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0029411764705882353,
      "grad_norm": 1.4588830471038818,
      "learning_rate": 1e-05,
      "loss": 2.2815,
      "step": 1
    },
    {
      "epoch": 0.0058823529411764705,
      "grad_norm": 1.471093773841858,
      "learning_rate": 9.970588235294119e-06,
      "loss": 2.2693,
      "step": 2
    },
    {
      "epoch": 0.008823529411764706,
      "grad_norm": 1.3569601774215698,
      "learning_rate": 9.941176470588236e-06,
      "loss": 2.2162,
      "step": 3
    },
    {
      "epoch": 0.011764705882352941,
      "grad_norm": 1.218796730041504,
      "learning_rate": 9.911764705882354e-06,
      "loss": 2.1471,
      "step": 4
    },
    {
      "epoch": 0.014705882352941176,
      "grad_norm": 1.0461595058441162,
      "learning_rate": 9.882352941176472e-06,
      "loss": 2.1782,
      "step": 5
    },
    {
      "epoch": 0.01764705882352941,
      "grad_norm": 0.7329587340354919,
      "learning_rate": 9.852941176470589e-06,
      "loss": 2.1145,
      "step": 6
    },
    {
      "epoch": 0.020588235294117647,
      "grad_norm": 0.598797619342804,
      "learning_rate": 9.823529411764706e-06,
      "loss": 2.0937,
      "step": 7
    },
    {
      "epoch": 0.023529411764705882,
      "grad_norm": 0.5003849267959595,
      "learning_rate": 9.794117647058824e-06,
      "loss": 2.0904,
      "step": 8
    },
    {
      "epoch": 0.026470588235294117,
      "grad_norm": 0.46593984961509705,
      "learning_rate": 9.764705882352942e-06,
      "loss": 2.0499,
      "step": 9
    },
    {
      "epoch": 0.029411764705882353,
      "grad_norm": 0.4855063557624817,
      "learning_rate": 9.735294117647059e-06,
      "loss": 2.036,
      "step": 10
    },
    {
      "epoch": 0.03235294117647059,
      "grad_norm": 0.47671785950660706,
      "learning_rate": 9.705882352941177e-06,
      "loss": 2.0764,
      "step": 11
    },
    {
      "epoch": 0.03529411764705882,
      "grad_norm": 0.43939489126205444,
      "learning_rate": 9.676470588235296e-06,
      "loss": 1.9735,
      "step": 12
    },
    {
      "epoch": 0.03823529411764706,
      "grad_norm": 0.46610742807388306,
      "learning_rate": 9.647058823529412e-06,
      "loss": 1.9306,
      "step": 13
    },
    {
      "epoch": 0.041176470588235294,
      "grad_norm": 0.4459688067436218,
      "learning_rate": 9.61764705882353e-06,
      "loss": 1.9308,
      "step": 14
    },
    {
      "epoch": 0.04411764705882353,
      "grad_norm": 0.4437696039676666,
      "learning_rate": 9.588235294117649e-06,
      "loss": 1.9268,
      "step": 15
    },
    {
      "epoch": 0.047058823529411764,
      "grad_norm": 0.43988853693008423,
      "learning_rate": 9.558823529411766e-06,
      "loss": 1.9884,
      "step": 16
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.4177871346473694,
      "learning_rate": 9.529411764705882e-06,
      "loss": 1.8756,
      "step": 17
    },
    {
      "epoch": 0.052941176470588235,
      "grad_norm": 0.4325030744075775,
      "learning_rate": 9.5e-06,
      "loss": 1.9104,
      "step": 18
    },
    {
      "epoch": 0.05588235294117647,
      "grad_norm": 0.4101027250289917,
      "learning_rate": 9.470588235294119e-06,
      "loss": 1.7968,
      "step": 19
    },
    {
      "epoch": 0.058823529411764705,
      "grad_norm": 0.4248889982700348,
      "learning_rate": 9.441176470588235e-06,
      "loss": 1.8519,
      "step": 20
    },
    {
      "epoch": 0.061764705882352944,
      "grad_norm": 0.41984012722969055,
      "learning_rate": 9.411764705882354e-06,
      "loss": 1.8169,
      "step": 21
    },
    {
      "epoch": 0.06470588235294118,
      "grad_norm": 0.44382062554359436,
      "learning_rate": 9.382352941176472e-06,
      "loss": 1.7935,
      "step": 22
    },
    {
      "epoch": 0.06764705882352941,
      "grad_norm": 0.4285382032394409,
      "learning_rate": 9.352941176470589e-06,
      "loss": 1.7796,
      "step": 23
    },
    {
      "epoch": 0.07058823529411765,
      "grad_norm": 0.41278114914894104,
      "learning_rate": 9.323529411764707e-06,
      "loss": 1.7602,
      "step": 24
    },
    {
      "epoch": 0.07352941176470588,
      "grad_norm": 0.3789098560810089,
      "learning_rate": 9.294117647058824e-06,
      "loss": 1.7267,
      "step": 25
    },
    {
      "epoch": 0.07647058823529412,
      "grad_norm": 0.3891398012638092,
      "learning_rate": 9.264705882352942e-06,
      "loss": 1.7524,
      "step": 26
    },
    {
      "epoch": 0.07941176470588235,
      "grad_norm": 0.3455236852169037,
      "learning_rate": 9.23529411764706e-06,
      "loss": 1.6852,
      "step": 27
    },
    {
      "epoch": 0.08235294117647059,
      "grad_norm": 0.3453163504600525,
      "learning_rate": 9.205882352941177e-06,
      "loss": 1.6711,
      "step": 28
    },
    {
      "epoch": 0.08529411764705883,
      "grad_norm": 0.36744871735572815,
      "learning_rate": 9.176470588235294e-06,
      "loss": 1.6939,
      "step": 29
    },
    {
      "epoch": 0.08823529411764706,
      "grad_norm": 0.32585012912750244,
      "learning_rate": 9.147058823529412e-06,
      "loss": 1.6565,
      "step": 30
    },
    {
      "epoch": 0.09117647058823529,
      "grad_norm": 0.3447589576244354,
      "learning_rate": 9.11764705882353e-06,
      "loss": 1.6604,
      "step": 31
    },
    {
      "epoch": 0.09411764705882353,
      "grad_norm": 0.3437487781047821,
      "learning_rate": 9.088235294117647e-06,
      "loss": 1.6219,
      "step": 32
    },
    {
      "epoch": 0.09705882352941177,
      "grad_norm": 0.3249136507511139,
      "learning_rate": 9.058823529411765e-06,
      "loss": 1.5504,
      "step": 33
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.3307740092277527,
      "learning_rate": 9.029411764705884e-06,
      "loss": 1.5859,
      "step": 34
    },
    {
      "epoch": 0.10294117647058823,
      "grad_norm": 0.3171888589859009,
      "learning_rate": 9e-06,
      "loss": 1.5263,
      "step": 35
    },
    {
      "epoch": 0.10588235294117647,
      "grad_norm": 0.3062046766281128,
      "learning_rate": 8.970588235294119e-06,
      "loss": 1.5303,
      "step": 36
    },
    {
      "epoch": 0.10882352941176471,
      "grad_norm": 0.29522180557250977,
      "learning_rate": 8.941176470588237e-06,
      "loss": 1.5458,
      "step": 37
    },
    {
      "epoch": 0.11176470588235295,
      "grad_norm": 0.30089306831359863,
      "learning_rate": 8.911764705882354e-06,
      "loss": 1.5185,
      "step": 38
    },
    {
      "epoch": 0.11470588235294117,
      "grad_norm": 0.29232335090637207,
      "learning_rate": 8.88235294117647e-06,
      "loss": 1.502,
      "step": 39
    },
    {
      "epoch": 0.11764705882352941,
      "grad_norm": 0.27594244480133057,
      "learning_rate": 8.852941176470588e-06,
      "loss": 1.4367,
      "step": 40
    },
    {
      "epoch": 0.12058823529411765,
      "grad_norm": 0.2704189419746399,
      "learning_rate": 8.823529411764707e-06,
      "loss": 1.4339,
      "step": 41
    },
    {
      "epoch": 0.12352941176470589,
      "grad_norm": 0.26492875814437866,
      "learning_rate": 8.794117647058823e-06,
      "loss": 1.459,
      "step": 42
    },
    {
      "epoch": 0.1264705882352941,
      "grad_norm": 0.28606313467025757,
      "learning_rate": 8.764705882352942e-06,
      "loss": 1.4691,
      "step": 43
    },
    {
      "epoch": 0.12941176470588237,
      "grad_norm": 0.28363236784935,
      "learning_rate": 8.73529411764706e-06,
      "loss": 1.4443,
      "step": 44
    },
    {
      "epoch": 0.1323529411764706,
      "grad_norm": 0.2818906009197235,
      "learning_rate": 8.705882352941177e-06,
      "loss": 1.4286,
      "step": 45
    },
    {
      "epoch": 0.13529411764705881,
      "grad_norm": 0.26159507036209106,
      "learning_rate": 8.676470588235295e-06,
      "loss": 1.3945,
      "step": 46
    },
    {
      "epoch": 0.13823529411764707,
      "grad_norm": 0.26535022258758545,
      "learning_rate": 8.647058823529413e-06,
      "loss": 1.4587,
      "step": 47
    },
    {
      "epoch": 0.1411764705882353,
      "grad_norm": 0.2538270056247711,
      "learning_rate": 8.61764705882353e-06,
      "loss": 1.4064,
      "step": 48
    },
    {
      "epoch": 0.14411764705882352,
      "grad_norm": 0.2544410228729248,
      "learning_rate": 8.588235294117647e-06,
      "loss": 1.411,
      "step": 49
    },
    {
      "epoch": 0.14705882352941177,
      "grad_norm": 0.2816343605518341,
      "learning_rate": 8.558823529411765e-06,
      "loss": 1.4139,
      "step": 50
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.2536643445491791,
      "learning_rate": 8.529411764705883e-06,
      "loss": 1.3882,
      "step": 51
    },
    {
      "epoch": 0.15294117647058825,
      "grad_norm": 0.26011887192726135,
      "learning_rate": 8.5e-06,
      "loss": 1.3709,
      "step": 52
    },
    {
      "epoch": 0.15588235294117647,
      "grad_norm": 0.2549336552619934,
      "learning_rate": 8.470588235294118e-06,
      "loss": 1.398,
      "step": 53
    },
    {
      "epoch": 0.1588235294117647,
      "grad_norm": 0.22624710202217102,
      "learning_rate": 8.441176470588237e-06,
      "loss": 1.3423,
      "step": 54
    },
    {
      "epoch": 0.16176470588235295,
      "grad_norm": 0.25475260615348816,
      "learning_rate": 8.411764705882353e-06,
      "loss": 1.356,
      "step": 55
    },
    {
      "epoch": 0.16470588235294117,
      "grad_norm": 0.25191032886505127,
      "learning_rate": 8.382352941176472e-06,
      "loss": 1.3847,
      "step": 56
    },
    {
      "epoch": 0.1676470588235294,
      "grad_norm": 0.2320636808872223,
      "learning_rate": 8.35294117647059e-06,
      "loss": 1.3318,
      "step": 57
    },
    {
      "epoch": 0.17058823529411765,
      "grad_norm": 0.276131808757782,
      "learning_rate": 8.323529411764707e-06,
      "loss": 1.2964,
      "step": 58
    },
    {
      "epoch": 0.17352941176470588,
      "grad_norm": 0.2447700798511505,
      "learning_rate": 8.294117647058825e-06,
      "loss": 1.3262,
      "step": 59
    },
    {
      "epoch": 0.17647058823529413,
      "grad_norm": 0.26056042313575745,
      "learning_rate": 8.264705882352941e-06,
      "loss": 1.3793,
      "step": 60
    },
    {
      "epoch": 0.17941176470588235,
      "grad_norm": 0.21680042147636414,
      "learning_rate": 8.23529411764706e-06,
      "loss": 1.301,
      "step": 61
    },
    {
      "epoch": 0.18235294117647058,
      "grad_norm": 0.21796001493930817,
      "learning_rate": 8.205882352941176e-06,
      "loss": 1.3339,
      "step": 62
    },
    {
      "epoch": 0.18529411764705883,
      "grad_norm": 0.218816876411438,
      "learning_rate": 8.176470588235295e-06,
      "loss": 1.2843,
      "step": 63
    },
    {
      "epoch": 0.18823529411764706,
      "grad_norm": 0.1997959166765213,
      "learning_rate": 8.147058823529413e-06,
      "loss": 1.2712,
      "step": 64
    },
    {
      "epoch": 0.19117647058823528,
      "grad_norm": 0.20165401697158813,
      "learning_rate": 8.11764705882353e-06,
      "loss": 1.2902,
      "step": 65
    },
    {
      "epoch": 0.19411764705882353,
      "grad_norm": 0.20525865256786346,
      "learning_rate": 8.088235294117648e-06,
      "loss": 1.2632,
      "step": 66
    },
    {
      "epoch": 0.19705882352941176,
      "grad_norm": 0.24750295281410217,
      "learning_rate": 8.058823529411766e-06,
      "loss": 1.2622,
      "step": 67
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.20822177827358246,
      "learning_rate": 8.029411764705883e-06,
      "loss": 1.2762,
      "step": 68
    },
    {
      "epoch": 0.20294117647058824,
      "grad_norm": 0.20200103521347046,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.2593,
      "step": 69
    },
    {
      "epoch": 0.20588235294117646,
      "grad_norm": 0.20765094459056854,
      "learning_rate": 7.97058823529412e-06,
      "loss": 1.2962,
      "step": 70
    },
    {
      "epoch": 0.2088235294117647,
      "grad_norm": 0.19960962235927582,
      "learning_rate": 7.941176470588236e-06,
      "loss": 1.2768,
      "step": 71
    },
    {
      "epoch": 0.21176470588235294,
      "grad_norm": 0.20074956119060516,
      "learning_rate": 7.911764705882353e-06,
      "loss": 1.2417,
      "step": 72
    },
    {
      "epoch": 0.21470588235294116,
      "grad_norm": 0.1980939656496048,
      "learning_rate": 7.882352941176471e-06,
      "loss": 1.2606,
      "step": 73
    },
    {
      "epoch": 0.21764705882352942,
      "grad_norm": 0.2036397010087967,
      "learning_rate": 7.85294117647059e-06,
      "loss": 1.2412,
      "step": 74
    },
    {
      "epoch": 0.22058823529411764,
      "grad_norm": 0.19486106932163239,
      "learning_rate": 7.823529411764706e-06,
      "loss": 1.2154,
      "step": 75
    },
    {
      "epoch": 0.2235294117647059,
      "grad_norm": 0.19677641987800598,
      "learning_rate": 7.794117647058825e-06,
      "loss": 1.2178,
      "step": 76
    },
    {
      "epoch": 0.22647058823529412,
      "grad_norm": 0.18516194820404053,
      "learning_rate": 7.764705882352941e-06,
      "loss": 1.2198,
      "step": 77
    },
    {
      "epoch": 0.22941176470588234,
      "grad_norm": 0.2153574824333191,
      "learning_rate": 7.73529411764706e-06,
      "loss": 1.2526,
      "step": 78
    },
    {
      "epoch": 0.2323529411764706,
      "grad_norm": 0.19904084503650665,
      "learning_rate": 7.705882352941178e-06,
      "loss": 1.2524,
      "step": 79
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 0.19030578434467316,
      "learning_rate": 7.676470588235294e-06,
      "loss": 1.2112,
      "step": 80
    },
    {
      "epoch": 0.23823529411764705,
      "grad_norm": 0.19393247365951538,
      "learning_rate": 7.647058823529411e-06,
      "loss": 1.2088,
      "step": 81
    },
    {
      "epoch": 0.2411764705882353,
      "grad_norm": 0.1918642818927765,
      "learning_rate": 7.617647058823529e-06,
      "loss": 1.1991,
      "step": 82
    },
    {
      "epoch": 0.24411764705882352,
      "grad_norm": 0.1984645426273346,
      "learning_rate": 7.588235294117648e-06,
      "loss": 1.1831,
      "step": 83
    },
    {
      "epoch": 0.24705882352941178,
      "grad_norm": 0.18611128628253937,
      "learning_rate": 7.558823529411765e-06,
      "loss": 1.1878,
      "step": 84
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.23308077454566956,
      "learning_rate": 7.529411764705883e-06,
      "loss": 1.256,
      "step": 85
    },
    {
      "epoch": 0.2529411764705882,
      "grad_norm": 0.21278584003448486,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.2132,
      "step": 86
    },
    {
      "epoch": 0.25588235294117645,
      "grad_norm": 0.1996898353099823,
      "learning_rate": 7.4705882352941185e-06,
      "loss": 1.2179,
      "step": 87
    },
    {
      "epoch": 0.25882352941176473,
      "grad_norm": 0.1844678670167923,
      "learning_rate": 7.441176470588236e-06,
      "loss": 1.2023,
      "step": 88
    },
    {
      "epoch": 0.26176470588235295,
      "grad_norm": 0.18834266066551208,
      "learning_rate": 7.4117647058823535e-06,
      "loss": 1.1773,
      "step": 89
    },
    {
      "epoch": 0.2647058823529412,
      "grad_norm": 0.1843290776014328,
      "learning_rate": 7.382352941176472e-06,
      "loss": 1.1989,
      "step": 90
    },
    {
      "epoch": 0.2676470588235294,
      "grad_norm": 0.19822850823402405,
      "learning_rate": 7.352941176470589e-06,
      "loss": 1.2012,
      "step": 91
    },
    {
      "epoch": 0.27058823529411763,
      "grad_norm": 0.224341481924057,
      "learning_rate": 7.323529411764706e-06,
      "loss": 1.2019,
      "step": 92
    },
    {
      "epoch": 0.2735294117647059,
      "grad_norm": 0.1946311742067337,
      "learning_rate": 7.294117647058823e-06,
      "loss": 1.1963,
      "step": 93
    },
    {
      "epoch": 0.27647058823529413,
      "grad_norm": 0.19121474027633667,
      "learning_rate": 7.264705882352942e-06,
      "loss": 1.1697,
      "step": 94
    },
    {
      "epoch": 0.27941176470588236,
      "grad_norm": 0.40967074036598206,
      "learning_rate": 7.235294117647059e-06,
      "loss": 1.1968,
      "step": 95
    },
    {
      "epoch": 0.2823529411764706,
      "grad_norm": 0.19602446258068085,
      "learning_rate": 7.205882352941177e-06,
      "loss": 1.1593,
      "step": 96
    },
    {
      "epoch": 0.2852941176470588,
      "grad_norm": 0.1895453929901123,
      "learning_rate": 7.176470588235295e-06,
      "loss": 1.1737,
      "step": 97
    },
    {
      "epoch": 0.28823529411764703,
      "grad_norm": 0.1971290111541748,
      "learning_rate": 7.1470588235294125e-06,
      "loss": 1.1937,
      "step": 98
    },
    {
      "epoch": 0.2911764705882353,
      "grad_norm": 0.183615580201149,
      "learning_rate": 7.11764705882353e-06,
      "loss": 1.1823,
      "step": 99
    },
    {
      "epoch": 0.29411764705882354,
      "grad_norm": 0.20080818235874176,
      "learning_rate": 7.088235294117648e-06,
      "loss": 1.1882,
      "step": 100
    },
    {
      "epoch": 0.29705882352941176,
      "grad_norm": 0.20981959998607635,
      "learning_rate": 7.058823529411766e-06,
      "loss": 1.1916,
      "step": 101
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.19891297817230225,
      "learning_rate": 7.029411764705882e-06,
      "loss": 1.168,
      "step": 102
    },
    {
      "epoch": 0.3029411764705882,
      "grad_norm": 0.19170914590358734,
      "learning_rate": 7e-06,
      "loss": 1.1672,
      "step": 103
    },
    {
      "epoch": 0.3058823529411765,
      "grad_norm": 0.19901618361473083,
      "learning_rate": 6.970588235294118e-06,
      "loss": 1.172,
      "step": 104
    },
    {
      "epoch": 0.3088235294117647,
      "grad_norm": 0.20435598492622375,
      "learning_rate": 6.941176470588236e-06,
      "loss": 1.1596,
      "step": 105
    },
    {
      "epoch": 0.31176470588235294,
      "grad_norm": 0.19358821213245392,
      "learning_rate": 6.911764705882353e-06,
      "loss": 1.1541,
      "step": 106
    },
    {
      "epoch": 0.31470588235294117,
      "grad_norm": 0.22525909543037415,
      "learning_rate": 6.8823529411764715e-06,
      "loss": 1.1385,
      "step": 107
    },
    {
      "epoch": 0.3176470588235294,
      "grad_norm": 0.22417089343070984,
      "learning_rate": 6.852941176470589e-06,
      "loss": 1.204,
      "step": 108
    },
    {
      "epoch": 0.3205882352941177,
      "grad_norm": 0.20851701498031616,
      "learning_rate": 6.8235294117647065e-06,
      "loss": 1.1874,
      "step": 109
    },
    {
      "epoch": 0.3235294117647059,
      "grad_norm": 0.19366827607154846,
      "learning_rate": 6.794117647058824e-06,
      "loss": 1.1461,
      "step": 110
    },
    {
      "epoch": 0.3264705882352941,
      "grad_norm": 0.1981084942817688,
      "learning_rate": 6.764705882352942e-06,
      "loss": 1.1369,
      "step": 111
    },
    {
      "epoch": 0.32941176470588235,
      "grad_norm": 0.21598248183727264,
      "learning_rate": 6.73529411764706e-06,
      "loss": 1.1471,
      "step": 112
    },
    {
      "epoch": 0.3323529411764706,
      "grad_norm": 0.2313269078731537,
      "learning_rate": 6.705882352941176e-06,
      "loss": 1.17,
      "step": 113
    },
    {
      "epoch": 0.3352941176470588,
      "grad_norm": 0.2236504703760147,
      "learning_rate": 6.676470588235294e-06,
      "loss": 1.1372,
      "step": 114
    },
    {
      "epoch": 0.3382352941176471,
      "grad_norm": 0.20379534363746643,
      "learning_rate": 6.647058823529412e-06,
      "loss": 1.1667,
      "step": 115
    },
    {
      "epoch": 0.3411764705882353,
      "grad_norm": 0.21076861023902893,
      "learning_rate": 6.61764705882353e-06,
      "loss": 1.1847,
      "step": 116
    },
    {
      "epoch": 0.34411764705882353,
      "grad_norm": 0.21549032628536224,
      "learning_rate": 6.588235294117647e-06,
      "loss": 1.1241,
      "step": 117
    },
    {
      "epoch": 0.34705882352941175,
      "grad_norm": 0.20546089112758636,
      "learning_rate": 6.5588235294117655e-06,
      "loss": 1.1105,
      "step": 118
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.22100862860679626,
      "learning_rate": 6.529411764705883e-06,
      "loss": 1.1139,
      "step": 119
    },
    {
      "epoch": 0.35294117647058826,
      "grad_norm": 0.22141504287719727,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 1.1616,
      "step": 120
    },
    {
      "epoch": 0.3558823529411765,
      "grad_norm": 0.20799893140792847,
      "learning_rate": 6.470588235294119e-06,
      "loss": 1.0986,
      "step": 121
    },
    {
      "epoch": 0.3588235294117647,
      "grad_norm": 0.1906932294368744,
      "learning_rate": 6.441176470588236e-06,
      "loss": 1.1263,
      "step": 122
    },
    {
      "epoch": 0.36176470588235293,
      "grad_norm": 0.2036122828722,
      "learning_rate": 6.411764705882354e-06,
      "loss": 1.1189,
      "step": 123
    },
    {
      "epoch": 0.36470588235294116,
      "grad_norm": 0.21240055561065674,
      "learning_rate": 6.38235294117647e-06,
      "loss": 1.156,
      "step": 124
    },
    {
      "epoch": 0.36764705882352944,
      "grad_norm": 0.21339499950408936,
      "learning_rate": 6.352941176470589e-06,
      "loss": 1.1046,
      "step": 125
    },
    {
      "epoch": 0.37058823529411766,
      "grad_norm": 0.20749010145664215,
      "learning_rate": 6.323529411764706e-06,
      "loss": 1.1369,
      "step": 126
    },
    {
      "epoch": 0.3735294117647059,
      "grad_norm": 0.25060978531837463,
      "learning_rate": 6.294117647058824e-06,
      "loss": 1.1424,
      "step": 127
    },
    {
      "epoch": 0.3764705882352941,
      "grad_norm": 0.22475501894950867,
      "learning_rate": 6.264705882352942e-06,
      "loss": 1.1346,
      "step": 128
    },
    {
      "epoch": 0.37941176470588234,
      "grad_norm": 0.26187384128570557,
      "learning_rate": 6.2352941176470595e-06,
      "loss": 1.0951,
      "step": 129
    },
    {
      "epoch": 0.38235294117647056,
      "grad_norm": 0.21285903453826904,
      "learning_rate": 6.205882352941177e-06,
      "loss": 1.1309,
      "step": 130
    },
    {
      "epoch": 0.38529411764705884,
      "grad_norm": 0.2294841706752777,
      "learning_rate": 6.176470588235295e-06,
      "loss": 1.133,
      "step": 131
    },
    {
      "epoch": 0.38823529411764707,
      "grad_norm": 0.21611462533473969,
      "learning_rate": 6.147058823529413e-06,
      "loss": 1.1325,
      "step": 132
    },
    {
      "epoch": 0.3911764705882353,
      "grad_norm": 0.22627894580364227,
      "learning_rate": 6.11764705882353e-06,
      "loss": 1.1467,
      "step": 133
    },
    {
      "epoch": 0.3941176470588235,
      "grad_norm": 0.2144460827112198,
      "learning_rate": 6.088235294117647e-06,
      "loss": 1.1093,
      "step": 134
    },
    {
      "epoch": 0.39705882352941174,
      "grad_norm": 0.20621620118618011,
      "learning_rate": 6.058823529411765e-06,
      "loss": 1.1538,
      "step": 135
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.19962570071220398,
      "learning_rate": 6.029411764705883e-06,
      "loss": 1.1032,
      "step": 136
    },
    {
      "epoch": 0.40294117647058825,
      "grad_norm": 0.2382269650697708,
      "learning_rate": 6e-06,
      "loss": 1.1381,
      "step": 137
    },
    {
      "epoch": 0.40588235294117647,
      "grad_norm": 0.20725472271442413,
      "learning_rate": 5.970588235294118e-06,
      "loss": 1.1153,
      "step": 138
    },
    {
      "epoch": 0.4088235294117647,
      "grad_norm": 0.21083097159862518,
      "learning_rate": 5.941176470588236e-06,
      "loss": 1.102,
      "step": 139
    },
    {
      "epoch": 0.4117647058823529,
      "grad_norm": 0.22206459939479828,
      "learning_rate": 5.9117647058823534e-06,
      "loss": 1.1309,
      "step": 140
    },
    {
      "epoch": 0.4147058823529412,
      "grad_norm": 0.22022652626037598,
      "learning_rate": 5.882352941176471e-06,
      "loss": 1.1515,
      "step": 141
    },
    {
      "epoch": 0.4176470588235294,
      "grad_norm": 0.2197543978691101,
      "learning_rate": 5.852941176470589e-06,
      "loss": 1.1402,
      "step": 142
    },
    {
      "epoch": 0.42058823529411765,
      "grad_norm": 0.22253328561782837,
      "learning_rate": 5.823529411764707e-06,
      "loss": 1.1038,
      "step": 143
    },
    {
      "epoch": 0.4235294117647059,
      "grad_norm": 0.21993358433246613,
      "learning_rate": 5.794117647058824e-06,
      "loss": 1.1258,
      "step": 144
    },
    {
      "epoch": 0.4264705882352941,
      "grad_norm": 0.24475879967212677,
      "learning_rate": 5.764705882352941e-06,
      "loss": 1.0968,
      "step": 145
    },
    {
      "epoch": 0.4294117647058823,
      "grad_norm": 0.2095886617898941,
      "learning_rate": 5.735294117647059e-06,
      "loss": 1.1428,
      "step": 146
    },
    {
      "epoch": 0.4323529411764706,
      "grad_norm": 0.21642598509788513,
      "learning_rate": 5.705882352941177e-06,
      "loss": 1.1131,
      "step": 147
    },
    {
      "epoch": 0.43529411764705883,
      "grad_norm": 0.2527022659778595,
      "learning_rate": 5.676470588235294e-06,
      "loss": 1.1412,
      "step": 148
    },
    {
      "epoch": 0.43823529411764706,
      "grad_norm": 0.2121608853340149,
      "learning_rate": 5.6470588235294125e-06,
      "loss": 1.1005,
      "step": 149
    },
    {
      "epoch": 0.4411764705882353,
      "grad_norm": 0.23695634305477142,
      "learning_rate": 5.61764705882353e-06,
      "loss": 1.1315,
      "step": 150
    },
    {
      "epoch": 0.4441176470588235,
      "grad_norm": 0.22872112691402435,
      "learning_rate": 5.588235294117647e-06,
      "loss": 1.0705,
      "step": 151
    },
    {
      "epoch": 0.4470588235294118,
      "grad_norm": 0.2091210037469864,
      "learning_rate": 5.558823529411766e-06,
      "loss": 1.1232,
      "step": 152
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.2245001345872879,
      "learning_rate": 5.529411764705883e-06,
      "loss": 1.1113,
      "step": 153
    },
    {
      "epoch": 0.45294117647058824,
      "grad_norm": 0.2469629943370819,
      "learning_rate": 5.500000000000001e-06,
      "loss": 1.1097,
      "step": 154
    },
    {
      "epoch": 0.45588235294117646,
      "grad_norm": 0.24886055290699005,
      "learning_rate": 5.470588235294119e-06,
      "loss": 1.1161,
      "step": 155
    },
    {
      "epoch": 0.4588235294117647,
      "grad_norm": 0.230917289853096,
      "learning_rate": 5.441176470588236e-06,
      "loss": 1.0882,
      "step": 156
    },
    {
      "epoch": 0.46176470588235297,
      "grad_norm": 0.315561443567276,
      "learning_rate": 5.411764705882353e-06,
      "loss": 1.0864,
      "step": 157
    },
    {
      "epoch": 0.4647058823529412,
      "grad_norm": 0.2106507569551468,
      "learning_rate": 5.382352941176471e-06,
      "loss": 1.091,
      "step": 158
    },
    {
      "epoch": 0.4676470588235294,
      "grad_norm": 0.22026994824409485,
      "learning_rate": 5.352941176470589e-06,
      "loss": 1.0655,
      "step": 159
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 0.23006486892700195,
      "learning_rate": 5.3235294117647064e-06,
      "loss": 1.0913,
      "step": 160
    },
    {
      "epoch": 0.47352941176470587,
      "grad_norm": 0.2270439714193344,
      "learning_rate": 5.294117647058824e-06,
      "loss": 1.076,
      "step": 161
    },
    {
      "epoch": 0.4764705882352941,
      "grad_norm": 0.25586023926734924,
      "learning_rate": 5.264705882352941e-06,
      "loss": 1.134,
      "step": 162
    },
    {
      "epoch": 0.47941176470588237,
      "grad_norm": 0.22336019575595856,
      "learning_rate": 5.23529411764706e-06,
      "loss": 1.1006,
      "step": 163
    },
    {
      "epoch": 0.4823529411764706,
      "grad_norm": 0.22112180292606354,
      "learning_rate": 5.205882352941177e-06,
      "loss": 1.0788,
      "step": 164
    },
    {
      "epoch": 0.4852941176470588,
      "grad_norm": 0.21939323842525482,
      "learning_rate": 5.176470588235295e-06,
      "loss": 1.0916,
      "step": 165
    },
    {
      "epoch": 0.48823529411764705,
      "grad_norm": 0.21641314029693604,
      "learning_rate": 5.147058823529411e-06,
      "loss": 1.0845,
      "step": 166
    },
    {
      "epoch": 0.49117647058823527,
      "grad_norm": 0.20961056649684906,
      "learning_rate": 5.11764705882353e-06,
      "loss": 1.0925,
      "step": 167
    },
    {
      "epoch": 0.49411764705882355,
      "grad_norm": 0.2464180290699005,
      "learning_rate": 5.088235294117647e-06,
      "loss": 1.0962,
      "step": 168
    },
    {
      "epoch": 0.4970588235294118,
      "grad_norm": 0.2157599776983261,
      "learning_rate": 5.058823529411765e-06,
      "loss": 1.0519,
      "step": 169
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.22428640723228455,
      "learning_rate": 5.029411764705883e-06,
      "loss": 1.0584,
      "step": 170
    },
    {
      "epoch": 0.5029411764705882,
      "grad_norm": 0.21695849299430847,
      "learning_rate": 5e-06,
      "loss": 1.0702,
      "step": 171
    },
    {
      "epoch": 0.5058823529411764,
      "grad_norm": 0.22800521552562714,
      "learning_rate": 4.970588235294118e-06,
      "loss": 1.0781,
      "step": 172
    },
    {
      "epoch": 0.5088235294117647,
      "grad_norm": 0.21697255969047546,
      "learning_rate": 4.941176470588236e-06,
      "loss": 1.0828,
      "step": 173
    },
    {
      "epoch": 0.5117647058823529,
      "grad_norm": 0.22448962926864624,
      "learning_rate": 4.911764705882353e-06,
      "loss": 1.0685,
      "step": 174
    },
    {
      "epoch": 0.5147058823529411,
      "grad_norm": 0.24604758620262146,
      "learning_rate": 4.882352941176471e-06,
      "loss": 1.0435,
      "step": 175
    },
    {
      "epoch": 0.5176470588235295,
      "grad_norm": 0.23682597279548645,
      "learning_rate": 4.852941176470589e-06,
      "loss": 1.0897,
      "step": 176
    },
    {
      "epoch": 0.5205882352941177,
      "grad_norm": 0.23220428824424744,
      "learning_rate": 4.823529411764706e-06,
      "loss": 1.0848,
      "step": 177
    },
    {
      "epoch": 0.5235294117647059,
      "grad_norm": 0.21729770302772522,
      "learning_rate": 4.7941176470588245e-06,
      "loss": 1.1027,
      "step": 178
    },
    {
      "epoch": 0.5264705882352941,
      "grad_norm": 0.36938586831092834,
      "learning_rate": 4.764705882352941e-06,
      "loss": 1.0855,
      "step": 179
    },
    {
      "epoch": 0.5294117647058824,
      "grad_norm": 0.22503703832626343,
      "learning_rate": 4.7352941176470594e-06,
      "loss": 1.0892,
      "step": 180
    },
    {
      "epoch": 0.5323529411764706,
      "grad_norm": 0.22622622549533844,
      "learning_rate": 4.705882352941177e-06,
      "loss": 1.0941,
      "step": 181
    },
    {
      "epoch": 0.5352941176470588,
      "grad_norm": 0.27123570442199707,
      "learning_rate": 4.676470588235294e-06,
      "loss": 1.0792,
      "step": 182
    },
    {
      "epoch": 0.538235294117647,
      "grad_norm": 0.23995374143123627,
      "learning_rate": 4.647058823529412e-06,
      "loss": 1.0669,
      "step": 183
    },
    {
      "epoch": 0.5411764705882353,
      "grad_norm": 0.2274896800518036,
      "learning_rate": 4.61764705882353e-06,
      "loss": 1.0665,
      "step": 184
    },
    {
      "epoch": 0.5441176470588235,
      "grad_norm": 0.2378711700439453,
      "learning_rate": 4.588235294117647e-06,
      "loss": 1.0748,
      "step": 185
    },
    {
      "epoch": 0.5470588235294118,
      "grad_norm": 0.2415841519832611,
      "learning_rate": 4.558823529411765e-06,
      "loss": 1.0837,
      "step": 186
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.21746912598609924,
      "learning_rate": 4.529411764705883e-06,
      "loss": 1.079,
      "step": 187
    },
    {
      "epoch": 0.5529411764705883,
      "grad_norm": 0.353338360786438,
      "learning_rate": 4.5e-06,
      "loss": 1.05,
      "step": 188
    },
    {
      "epoch": 0.5558823529411765,
      "grad_norm": 0.23014746606349945,
      "learning_rate": 4.4705882352941184e-06,
      "loss": 1.092,
      "step": 189
    },
    {
      "epoch": 0.5588235294117647,
      "grad_norm": 0.23989826440811157,
      "learning_rate": 4.441176470588235e-06,
      "loss": 1.0795,
      "step": 190
    },
    {
      "epoch": 0.5617647058823529,
      "grad_norm": 0.2372935563325882,
      "learning_rate": 4.411764705882353e-06,
      "loss": 1.0599,
      "step": 191
    },
    {
      "epoch": 0.5647058823529412,
      "grad_norm": 0.22507061064243317,
      "learning_rate": 4.382352941176471e-06,
      "loss": 1.0746,
      "step": 192
    },
    {
      "epoch": 0.5676470588235294,
      "grad_norm": 0.2515420913696289,
      "learning_rate": 4.352941176470588e-06,
      "loss": 1.0765,
      "step": 193
    },
    {
      "epoch": 0.5705882352941176,
      "grad_norm": 0.23316609859466553,
      "learning_rate": 4.323529411764707e-06,
      "loss": 1.1088,
      "step": 194
    },
    {
      "epoch": 0.5735294117647058,
      "grad_norm": 0.24043501913547516,
      "learning_rate": 4.294117647058823e-06,
      "loss": 1.1,
      "step": 195
    },
    {
      "epoch": 0.5764705882352941,
      "grad_norm": 0.22288966178894043,
      "learning_rate": 4.264705882352942e-06,
      "loss": 1.0924,
      "step": 196
    },
    {
      "epoch": 0.5794117647058824,
      "grad_norm": 0.23474599421024323,
      "learning_rate": 4.235294117647059e-06,
      "loss": 1.0658,
      "step": 197
    },
    {
      "epoch": 0.5823529411764706,
      "grad_norm": 0.2446664720773697,
      "learning_rate": 4.205882352941177e-06,
      "loss": 1.0773,
      "step": 198
    },
    {
      "epoch": 0.5852941176470589,
      "grad_norm": 0.247602179646492,
      "learning_rate": 4.176470588235295e-06,
      "loss": 1.0627,
      "step": 199
    },
    {
      "epoch": 0.5882352941176471,
      "grad_norm": 0.2503092586994171,
      "learning_rate": 4.147058823529412e-06,
      "loss": 1.0938,
      "step": 200
    },
    {
      "epoch": 0.5911764705882353,
      "grad_norm": 0.23191429674625397,
      "learning_rate": 4.11764705882353e-06,
      "loss": 1.0668,
      "step": 201
    },
    {
      "epoch": 0.5941176470588235,
      "grad_norm": 0.23563452064990997,
      "learning_rate": 4.088235294117647e-06,
      "loss": 1.0328,
      "step": 202
    },
    {
      "epoch": 0.5970588235294118,
      "grad_norm": 0.27292484045028687,
      "learning_rate": 4.058823529411765e-06,
      "loss": 1.0473,
      "step": 203
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.21903876960277557,
      "learning_rate": 4.029411764705883e-06,
      "loss": 1.0523,
      "step": 204
    },
    {
      "epoch": 0.6029411764705882,
      "grad_norm": 0.3081904351711273,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.0887,
      "step": 205
    },
    {
      "epoch": 0.6058823529411764,
      "grad_norm": 0.31239840388298035,
      "learning_rate": 3.970588235294118e-06,
      "loss": 1.0981,
      "step": 206
    },
    {
      "epoch": 0.6088235294117647,
      "grad_norm": 0.23030546307563782,
      "learning_rate": 3.941176470588236e-06,
      "loss": 1.0273,
      "step": 207
    },
    {
      "epoch": 0.611764705882353,
      "grad_norm": 0.24007487297058105,
      "learning_rate": 3.911764705882353e-06,
      "loss": 1.0981,
      "step": 208
    },
    {
      "epoch": 0.6147058823529412,
      "grad_norm": 0.2520597577095032,
      "learning_rate": 3.882352941176471e-06,
      "loss": 1.0742,
      "step": 209
    },
    {
      "epoch": 0.6176470588235294,
      "grad_norm": 0.24418134987354279,
      "learning_rate": 3.852941176470589e-06,
      "loss": 1.0769,
      "step": 210
    },
    {
      "epoch": 0.6205882352941177,
      "grad_norm": 0.24392227828502655,
      "learning_rate": 3.8235294117647055e-06,
      "loss": 1.0591,
      "step": 211
    },
    {
      "epoch": 0.6235294117647059,
      "grad_norm": 0.25623035430908203,
      "learning_rate": 3.794117647058824e-06,
      "loss": 1.0524,
      "step": 212
    },
    {
      "epoch": 0.6264705882352941,
      "grad_norm": 0.25523054599761963,
      "learning_rate": 3.7647058823529414e-06,
      "loss": 1.0785,
      "step": 213
    },
    {
      "epoch": 0.6294117647058823,
      "grad_norm": 0.2477787584066391,
      "learning_rate": 3.7352941176470593e-06,
      "loss": 1.084,
      "step": 214
    },
    {
      "epoch": 0.6323529411764706,
      "grad_norm": 0.22725874185562134,
      "learning_rate": 3.7058823529411767e-06,
      "loss": 1.0784,
      "step": 215
    },
    {
      "epoch": 0.6352941176470588,
      "grad_norm": 0.25304916501045227,
      "learning_rate": 3.6764705882352946e-06,
      "loss": 1.067,
      "step": 216
    },
    {
      "epoch": 0.638235294117647,
      "grad_norm": 0.23904795944690704,
      "learning_rate": 3.6470588235294117e-06,
      "loss": 1.0671,
      "step": 217
    },
    {
      "epoch": 0.6411764705882353,
      "grad_norm": 0.35910505056381226,
      "learning_rate": 3.6176470588235296e-06,
      "loss": 1.1031,
      "step": 218
    },
    {
      "epoch": 0.6441176470588236,
      "grad_norm": 0.2860150933265686,
      "learning_rate": 3.5882352941176475e-06,
      "loss": 1.0789,
      "step": 219
    },
    {
      "epoch": 0.6470588235294118,
      "grad_norm": 0.25603345036506653,
      "learning_rate": 3.558823529411765e-06,
      "loss": 1.0452,
      "step": 220
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.24726077914237976,
      "learning_rate": 3.529411764705883e-06,
      "loss": 1.0663,
      "step": 221
    },
    {
      "epoch": 0.6529411764705882,
      "grad_norm": 0.24604010581970215,
      "learning_rate": 3.5e-06,
      "loss": 1.0638,
      "step": 222
    },
    {
      "epoch": 0.6558823529411765,
      "grad_norm": 0.25741252303123474,
      "learning_rate": 3.470588235294118e-06,
      "loss": 1.0516,
      "step": 223
    },
    {
      "epoch": 0.6588235294117647,
      "grad_norm": 0.2364550083875656,
      "learning_rate": 3.4411764705882358e-06,
      "loss": 1.066,
      "step": 224
    },
    {
      "epoch": 0.6617647058823529,
      "grad_norm": 0.25125953555107117,
      "learning_rate": 3.4117647058823532e-06,
      "loss": 1.0247,
      "step": 225
    },
    {
      "epoch": 0.6647058823529411,
      "grad_norm": 0.3127348721027374,
      "learning_rate": 3.382352941176471e-06,
      "loss": 1.0668,
      "step": 226
    },
    {
      "epoch": 0.6676470588235294,
      "grad_norm": 0.233737975358963,
      "learning_rate": 3.352941176470588e-06,
      "loss": 1.0794,
      "step": 227
    },
    {
      "epoch": 0.6705882352941176,
      "grad_norm": 0.25699761509895325,
      "learning_rate": 3.323529411764706e-06,
      "loss": 1.0511,
      "step": 228
    },
    {
      "epoch": 0.6735294117647059,
      "grad_norm": 0.24534179270267487,
      "learning_rate": 3.2941176470588236e-06,
      "loss": 1.0707,
      "step": 229
    },
    {
      "epoch": 0.6764705882352942,
      "grad_norm": 0.2518172264099121,
      "learning_rate": 3.2647058823529415e-06,
      "loss": 1.0856,
      "step": 230
    },
    {
      "epoch": 0.6794117647058824,
      "grad_norm": 0.2618185877799988,
      "learning_rate": 3.2352941176470594e-06,
      "loss": 1.0624,
      "step": 231
    },
    {
      "epoch": 0.6823529411764706,
      "grad_norm": 0.24736982583999634,
      "learning_rate": 3.205882352941177e-06,
      "loss": 1.0648,
      "step": 232
    },
    {
      "epoch": 0.6852941176470588,
      "grad_norm": 0.25377774238586426,
      "learning_rate": 3.1764705882352943e-06,
      "loss": 1.0563,
      "step": 233
    },
    {
      "epoch": 0.6882352941176471,
      "grad_norm": 0.264789342880249,
      "learning_rate": 3.147058823529412e-06,
      "loss": 1.0609,
      "step": 234
    },
    {
      "epoch": 0.6911764705882353,
      "grad_norm": 0.23524682223796844,
      "learning_rate": 3.1176470588235297e-06,
      "loss": 1.0513,
      "step": 235
    },
    {
      "epoch": 0.6941176470588235,
      "grad_norm": 0.2972707748413086,
      "learning_rate": 3.0882352941176476e-06,
      "loss": 1.0712,
      "step": 236
    },
    {
      "epoch": 0.6970588235294117,
      "grad_norm": 0.2425236701965332,
      "learning_rate": 3.058823529411765e-06,
      "loss": 1.0636,
      "step": 237
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.23967207968235016,
      "learning_rate": 3.0294117647058826e-06,
      "loss": 1.0506,
      "step": 238
    },
    {
      "epoch": 0.7029411764705882,
      "grad_norm": 0.25000911951065063,
      "learning_rate": 3e-06,
      "loss": 1.0225,
      "step": 239
    },
    {
      "epoch": 0.7058823529411765,
      "grad_norm": 0.27761110663414,
      "learning_rate": 2.970588235294118e-06,
      "loss": 1.0557,
      "step": 240
    },
    {
      "epoch": 0.7088235294117647,
      "grad_norm": 0.23988835513591766,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 1.039,
      "step": 241
    },
    {
      "epoch": 0.711764705882353,
      "grad_norm": 0.2365685999393463,
      "learning_rate": 2.9117647058823534e-06,
      "loss": 1.0524,
      "step": 242
    },
    {
      "epoch": 0.7147058823529412,
      "grad_norm": 0.2913731038570404,
      "learning_rate": 2.8823529411764704e-06,
      "loss": 1.0733,
      "step": 243
    },
    {
      "epoch": 0.7176470588235294,
      "grad_norm": 0.3353981375694275,
      "learning_rate": 2.8529411764705883e-06,
      "loss": 1.0579,
      "step": 244
    },
    {
      "epoch": 0.7205882352941176,
      "grad_norm": 0.3488647937774658,
      "learning_rate": 2.8235294117647062e-06,
      "loss": 1.0766,
      "step": 245
    },
    {
      "epoch": 0.7235294117647059,
      "grad_norm": 0.2559491693973541,
      "learning_rate": 2.7941176470588237e-06,
      "loss": 1.0751,
      "step": 246
    },
    {
      "epoch": 0.7264705882352941,
      "grad_norm": 0.24440276622772217,
      "learning_rate": 2.7647058823529416e-06,
      "loss": 1.0735,
      "step": 247
    },
    {
      "epoch": 0.7294117647058823,
      "grad_norm": 0.237368643283844,
      "learning_rate": 2.7352941176470595e-06,
      "loss": 1.0442,
      "step": 248
    },
    {
      "epoch": 0.7323529411764705,
      "grad_norm": 0.29413533210754395,
      "learning_rate": 2.7058823529411766e-06,
      "loss": 1.0496,
      "step": 249
    },
    {
      "epoch": 0.7352941176470589,
      "grad_norm": 0.23831024765968323,
      "learning_rate": 2.6764705882352945e-06,
      "loss": 1.0522,
      "step": 250
    },
    {
      "epoch": 0.7382352941176471,
      "grad_norm": 0.2576645612716675,
      "learning_rate": 2.647058823529412e-06,
      "loss": 1.0257,
      "step": 251
    },
    {
      "epoch": 0.7411764705882353,
      "grad_norm": 0.2468797266483307,
      "learning_rate": 2.61764705882353e-06,
      "loss": 1.0742,
      "step": 252
    },
    {
      "epoch": 0.7441176470588236,
      "grad_norm": 0.2345353364944458,
      "learning_rate": 2.5882352941176473e-06,
      "loss": 1.0615,
      "step": 253
    },
    {
      "epoch": 0.7470588235294118,
      "grad_norm": 0.2569163143634796,
      "learning_rate": 2.558823529411765e-06,
      "loss": 1.0656,
      "step": 254
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.2634453773498535,
      "learning_rate": 2.5294117647058823e-06,
      "loss": 1.0498,
      "step": 255
    },
    {
      "epoch": 0.7529411764705882,
      "grad_norm": 0.3457936644554138,
      "learning_rate": 2.5e-06,
      "loss": 1.04,
      "step": 256
    },
    {
      "epoch": 0.7558823529411764,
      "grad_norm": 0.24360963702201843,
      "learning_rate": 2.470588235294118e-06,
      "loss": 1.0357,
      "step": 257
    },
    {
      "epoch": 0.7588235294117647,
      "grad_norm": 0.23760569095611572,
      "learning_rate": 2.4411764705882356e-06,
      "loss": 1.0592,
      "step": 258
    },
    {
      "epoch": 0.7617647058823529,
      "grad_norm": 0.24314503371715546,
      "learning_rate": 2.411764705882353e-06,
      "loss": 1.0389,
      "step": 259
    },
    {
      "epoch": 0.7647058823529411,
      "grad_norm": 0.24598494172096252,
      "learning_rate": 2.3823529411764705e-06,
      "loss": 1.0223,
      "step": 260
    },
    {
      "epoch": 0.7676470588235295,
      "grad_norm": 0.2521425783634186,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 1.0707,
      "step": 261
    },
    {
      "epoch": 0.7705882352941177,
      "grad_norm": 0.34780582785606384,
      "learning_rate": 2.323529411764706e-06,
      "loss": 1.0389,
      "step": 262
    },
    {
      "epoch": 0.7735294117647059,
      "grad_norm": 0.25080588459968567,
      "learning_rate": 2.2941176470588234e-06,
      "loss": 1.0546,
      "step": 263
    },
    {
      "epoch": 0.7764705882352941,
      "grad_norm": 0.2575359344482422,
      "learning_rate": 2.2647058823529413e-06,
      "loss": 1.0652,
      "step": 264
    },
    {
      "epoch": 0.7794117647058824,
      "grad_norm": 0.307264119386673,
      "learning_rate": 2.2352941176470592e-06,
      "loss": 1.0675,
      "step": 265
    },
    {
      "epoch": 0.7823529411764706,
      "grad_norm": 0.24160020053386688,
      "learning_rate": 2.2058823529411767e-06,
      "loss": 1.0452,
      "step": 266
    },
    {
      "epoch": 0.7852941176470588,
      "grad_norm": 0.24322670698165894,
      "learning_rate": 2.176470588235294e-06,
      "loss": 1.0553,
      "step": 267
    },
    {
      "epoch": 0.788235294117647,
      "grad_norm": 0.24782130122184753,
      "learning_rate": 2.1470588235294117e-06,
      "loss": 1.0724,
      "step": 268
    },
    {
      "epoch": 0.7911764705882353,
      "grad_norm": 0.27055615186691284,
      "learning_rate": 2.1176470588235296e-06,
      "loss": 1.0247,
      "step": 269
    },
    {
      "epoch": 0.7941176470588235,
      "grad_norm": 0.2918650805950165,
      "learning_rate": 2.0882352941176475e-06,
      "loss": 1.0021,
      "step": 270
    },
    {
      "epoch": 0.7970588235294118,
      "grad_norm": 0.27240893244743347,
      "learning_rate": 2.058823529411765e-06,
      "loss": 1.0386,
      "step": 271
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.26854994893074036,
      "learning_rate": 2.0294117647058824e-06,
      "loss": 1.0705,
      "step": 272
    },
    {
      "epoch": 0.8029411764705883,
      "grad_norm": 0.2381146103143692,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.0533,
      "step": 273
    },
    {
      "epoch": 0.8058823529411765,
      "grad_norm": 0.2356519252061844,
      "learning_rate": 1.970588235294118e-06,
      "loss": 1.0368,
      "step": 274
    },
    {
      "epoch": 0.8088235294117647,
      "grad_norm": 0.2659197747707367,
      "learning_rate": 1.9411764705882353e-06,
      "loss": 1.0609,
      "step": 275
    },
    {
      "epoch": 0.8117647058823529,
      "grad_norm": 0.24807916581630707,
      "learning_rate": 1.9117647058823528e-06,
      "loss": 1.0314,
      "step": 276
    },
    {
      "epoch": 0.8147058823529412,
      "grad_norm": 0.2560952603816986,
      "learning_rate": 1.8823529411764707e-06,
      "loss": 1.0362,
      "step": 277
    },
    {
      "epoch": 0.8176470588235294,
      "grad_norm": 0.23093286156654358,
      "learning_rate": 1.8529411764705884e-06,
      "loss": 1.0337,
      "step": 278
    },
    {
      "epoch": 0.8205882352941176,
      "grad_norm": 0.24520130455493927,
      "learning_rate": 1.8235294117647058e-06,
      "loss": 1.0255,
      "step": 279
    },
    {
      "epoch": 0.8235294117647058,
      "grad_norm": 0.24421636760234833,
      "learning_rate": 1.7941176470588238e-06,
      "loss": 1.0533,
      "step": 280
    },
    {
      "epoch": 0.8264705882352941,
      "grad_norm": 0.24678398668766022,
      "learning_rate": 1.7647058823529414e-06,
      "loss": 1.0597,
      "step": 281
    },
    {
      "epoch": 0.8294117647058824,
      "grad_norm": 0.27443745732307434,
      "learning_rate": 1.735294117647059e-06,
      "loss": 1.0489,
      "step": 282
    },
    {
      "epoch": 0.8323529411764706,
      "grad_norm": 0.24420900642871857,
      "learning_rate": 1.7058823529411766e-06,
      "loss": 1.0453,
      "step": 283
    },
    {
      "epoch": 0.8352941176470589,
      "grad_norm": 0.2628645598888397,
      "learning_rate": 1.676470588235294e-06,
      "loss": 1.0557,
      "step": 284
    },
    {
      "epoch": 0.8382352941176471,
      "grad_norm": 0.2697880268096924,
      "learning_rate": 1.6470588235294118e-06,
      "loss": 0.987,
      "step": 285
    },
    {
      "epoch": 0.8411764705882353,
      "grad_norm": 0.2665739357471466,
      "learning_rate": 1.6176470588235297e-06,
      "loss": 1.0502,
      "step": 286
    },
    {
      "epoch": 0.8441176470588235,
      "grad_norm": 0.2457929402589798,
      "learning_rate": 1.5882352941176472e-06,
      "loss": 1.0461,
      "step": 287
    },
    {
      "epoch": 0.8470588235294118,
      "grad_norm": 0.2716211676597595,
      "learning_rate": 1.5588235294117649e-06,
      "loss": 1.0517,
      "step": 288
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.2448592483997345,
      "learning_rate": 1.5294117647058826e-06,
      "loss": 1.012,
      "step": 289
    },
    {
      "epoch": 0.8529411764705882,
      "grad_norm": 0.24840906262397766,
      "learning_rate": 1.5e-06,
      "loss": 1.057,
      "step": 290
    },
    {
      "epoch": 0.8558823529411764,
      "grad_norm": 0.3201400935649872,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 1.0132,
      "step": 291
    },
    {
      "epoch": 0.8588235294117647,
      "grad_norm": 0.24024993181228638,
      "learning_rate": 1.4411764705882352e-06,
      "loss": 1.0422,
      "step": 292
    },
    {
      "epoch": 0.861764705882353,
      "grad_norm": 0.2523883283138275,
      "learning_rate": 1.4117647058823531e-06,
      "loss": 1.0431,
      "step": 293
    },
    {
      "epoch": 0.8647058823529412,
      "grad_norm": 0.23962493240833282,
      "learning_rate": 1.3823529411764708e-06,
      "loss": 1.0499,
      "step": 294
    },
    {
      "epoch": 0.8676470588235294,
      "grad_norm": 0.23773916065692902,
      "learning_rate": 1.3529411764705883e-06,
      "loss": 1.0031,
      "step": 295
    },
    {
      "epoch": 0.8705882352941177,
      "grad_norm": 0.24341008067131042,
      "learning_rate": 1.323529411764706e-06,
      "loss": 1.0457,
      "step": 296
    },
    {
      "epoch": 0.8735294117647059,
      "grad_norm": 0.27544376254081726,
      "learning_rate": 1.2941176470588237e-06,
      "loss": 1.0574,
      "step": 297
    },
    {
      "epoch": 0.8764705882352941,
      "grad_norm": 0.2580159604549408,
      "learning_rate": 1.2647058823529412e-06,
      "loss": 1.0042,
      "step": 298
    },
    {
      "epoch": 0.8794117647058823,
      "grad_norm": 0.2834937274456024,
      "learning_rate": 1.235294117647059e-06,
      "loss": 1.04,
      "step": 299
    },
    {
      "epoch": 0.8823529411764706,
      "grad_norm": 0.2423858493566513,
      "learning_rate": 1.2058823529411765e-06,
      "loss": 1.043,
      "step": 300
    },
    {
      "epoch": 0.8852941176470588,
      "grad_norm": 0.24057364463806152,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 1.0464,
      "step": 301
    },
    {
      "epoch": 0.888235294117647,
      "grad_norm": 0.25729063153266907,
      "learning_rate": 1.1470588235294117e-06,
      "loss": 1.0825,
      "step": 302
    },
    {
      "epoch": 0.8911764705882353,
      "grad_norm": 0.24446231126785278,
      "learning_rate": 1.1176470588235296e-06,
      "loss": 1.0542,
      "step": 303
    },
    {
      "epoch": 0.8941176470588236,
      "grad_norm": 0.2583821415901184,
      "learning_rate": 1.088235294117647e-06,
      "loss": 1.015,
      "step": 304
    },
    {
      "epoch": 0.8970588235294118,
      "grad_norm": 0.23999781906604767,
      "learning_rate": 1.0588235294117648e-06,
      "loss": 1.032,
      "step": 305
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.25951138138771057,
      "learning_rate": 1.0294117647058825e-06,
      "loss": 1.0619,
      "step": 306
    },
    {
      "epoch": 0.9029411764705882,
      "grad_norm": 0.24357210099697113,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.0349,
      "step": 307
    },
    {
      "epoch": 0.9058823529411765,
      "grad_norm": 0.2540913224220276,
      "learning_rate": 9.705882352941176e-07,
      "loss": 1.0651,
      "step": 308
    },
    {
      "epoch": 0.9088235294117647,
      "grad_norm": 0.2388201504945755,
      "learning_rate": 9.411764705882353e-07,
      "loss": 1.0361,
      "step": 309
    },
    {
      "epoch": 0.9117647058823529,
      "grad_norm": 0.26510342955589294,
      "learning_rate": 9.117647058823529e-07,
      "loss": 1.0545,
      "step": 310
    },
    {
      "epoch": 0.9147058823529411,
      "grad_norm": 0.26220619678497314,
      "learning_rate": 8.823529411764707e-07,
      "loss": 1.0607,
      "step": 311
    },
    {
      "epoch": 0.9176470588235294,
      "grad_norm": 0.24358054995536804,
      "learning_rate": 8.529411764705883e-07,
      "loss": 1.0315,
      "step": 312
    },
    {
      "epoch": 0.9205882352941176,
      "grad_norm": 0.2402532547712326,
      "learning_rate": 8.235294117647059e-07,
      "loss": 1.0482,
      "step": 313
    },
    {
      "epoch": 0.9235294117647059,
      "grad_norm": 0.2409527748823166,
      "learning_rate": 7.941176470588236e-07,
      "loss": 1.0179,
      "step": 314
    },
    {
      "epoch": 0.9264705882352942,
      "grad_norm": 0.23844361305236816,
      "learning_rate": 7.647058823529413e-07,
      "loss": 1.0343,
      "step": 315
    },
    {
      "epoch": 0.9294117647058824,
      "grad_norm": 0.2569085955619812,
      "learning_rate": 7.352941176470589e-07,
      "loss": 1.0672,
      "step": 316
    },
    {
      "epoch": 0.9323529411764706,
      "grad_norm": 0.2910473048686981,
      "learning_rate": 7.058823529411766e-07,
      "loss": 1.0306,
      "step": 317
    },
    {
      "epoch": 0.9352941176470588,
      "grad_norm": 0.24089165031909943,
      "learning_rate": 6.764705882352941e-07,
      "loss": 1.0184,
      "step": 318
    },
    {
      "epoch": 0.9382352941176471,
      "grad_norm": 0.25330087542533875,
      "learning_rate": 6.470588235294118e-07,
      "loss": 1.0566,
      "step": 319
    },
    {
      "epoch": 0.9411764705882353,
      "grad_norm": 0.25225237011909485,
      "learning_rate": 6.176470588235295e-07,
      "loss": 1.0303,
      "step": 320
    },
    {
      "epoch": 0.9441176470588235,
      "grad_norm": 0.24123378098011017,
      "learning_rate": 5.882352941176471e-07,
      "loss": 1.021,
      "step": 321
    },
    {
      "epoch": 0.9470588235294117,
      "grad_norm": 0.2519339323043823,
      "learning_rate": 5.588235294117648e-07,
      "loss": 1.0472,
      "step": 322
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.25272488594055176,
      "learning_rate": 5.294117647058824e-07,
      "loss": 1.0592,
      "step": 323
    },
    {
      "epoch": 0.9529411764705882,
      "grad_norm": 0.24109138548374176,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.0241,
      "step": 324
    },
    {
      "epoch": 0.9558823529411765,
      "grad_norm": 0.2700742483139038,
      "learning_rate": 4.7058823529411767e-07,
      "loss": 1.0378,
      "step": 325
    },
    {
      "epoch": 0.9588235294117647,
      "grad_norm": 0.24684427678585052,
      "learning_rate": 4.4117647058823536e-07,
      "loss": 1.0328,
      "step": 326
    },
    {
      "epoch": 0.961764705882353,
      "grad_norm": 0.36805665493011475,
      "learning_rate": 4.1176470588235295e-07,
      "loss": 1.0588,
      "step": 327
    },
    {
      "epoch": 0.9647058823529412,
      "grad_norm": 0.24640294909477234,
      "learning_rate": 3.8235294117647064e-07,
      "loss": 1.0281,
      "step": 328
    },
    {
      "epoch": 0.9676470588235294,
      "grad_norm": 0.2545859217643738,
      "learning_rate": 3.529411764705883e-07,
      "loss": 1.0663,
      "step": 329
    },
    {
      "epoch": 0.9705882352941176,
      "grad_norm": 0.256657212972641,
      "learning_rate": 3.235294117647059e-07,
      "loss": 1.0219,
      "step": 330
    },
    {
      "epoch": 0.9735294117647059,
      "grad_norm": 0.23593604564666748,
      "learning_rate": 2.9411764705882356e-07,
      "loss": 1.0273,
      "step": 331
    },
    {
      "epoch": 0.9764705882352941,
      "grad_norm": 0.25067442655563354,
      "learning_rate": 2.647058823529412e-07,
      "loss": 1.0298,
      "step": 332
    },
    {
      "epoch": 0.9794117647058823,
      "grad_norm": 0.23615114390850067,
      "learning_rate": 2.3529411764705883e-07,
      "loss": 1.0325,
      "step": 333
    },
    {
      "epoch": 0.9823529411764705,
      "grad_norm": 0.24917052686214447,
      "learning_rate": 2.0588235294117647e-07,
      "loss": 1.0646,
      "step": 334
    },
    {
      "epoch": 0.9852941176470589,
      "grad_norm": 0.2560918629169464,
      "learning_rate": 1.7647058823529414e-07,
      "loss": 1.0233,
      "step": 335
    },
    {
      "epoch": 0.9882352941176471,
      "grad_norm": 0.25138577818870544,
      "learning_rate": 1.4705882352941178e-07,
      "loss": 1.0614,
      "step": 336
    },
    {
      "epoch": 0.9911764705882353,
      "grad_norm": 0.23887692391872406,
      "learning_rate": 1.1764705882352942e-07,
      "loss": 1.019,
      "step": 337
    },
    {
      "epoch": 0.9941176470588236,
      "grad_norm": 0.2688574492931366,
      "learning_rate": 8.823529411764707e-08,
      "loss": 1.0471,
      "step": 338
    },
    {
      "epoch": 0.9970588235294118,
      "grad_norm": 0.5432394742965698,
      "learning_rate": 5.882352941176471e-08,
      "loss": 1.0482,
      "step": 339
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.2513401210308075,
      "learning_rate": 2.9411764705882354e-08,
      "loss": 1.0161,
      "step": 340
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 340,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.128366044583297e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}