{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 745,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.997315436241611e-05,
      "loss": 3.1953,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9946308724832216e-05,
      "loss": 3.1953,
      "step": 2
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9919463087248324e-05,
      "loss": 3.1523,
      "step": 3
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.989261744966443e-05,
      "loss": 3.1562,
      "step": 4
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.986577181208054e-05,
      "loss": 3.043,
      "step": 5
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9838926174496646e-05,
      "loss": 3.0391,
      "step": 6
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9812080536912754e-05,
      "loss": 3.0312,
      "step": 7
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.978523489932886e-05,
      "loss": 2.9473,
      "step": 8
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.975838926174497e-05,
      "loss": 2.9316,
      "step": 9
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9731543624161076e-05,
      "loss": 2.9316,
      "step": 10
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9704697986577183e-05,
      "loss": 2.9062,
      "step": 11
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.967785234899329e-05,
      "loss": 2.8984,
      "step": 12
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9651006711409398e-05,
      "loss": 2.8652,
      "step": 13
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9624161073825505e-05,
      "loss": 2.8125,
      "step": 14
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9597315436241613e-05,
      "loss": 2.7734,
      "step": 15
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.957046979865772e-05,
      "loss": 2.7773,
      "step": 16
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9543624161073828e-05,
      "loss": 2.75,
      "step": 17
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9516778523489935e-05,
      "loss": 2.7695,
      "step": 18
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9489932885906042e-05,
      "loss": 2.7285,
      "step": 19
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.946308724832215e-05,
      "loss": 2.7148,
      "step": 20
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9436241610738257e-05,
      "loss": 2.7031,
      "step": 21
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9409395973154365e-05,
      "loss": 2.6816,
      "step": 22
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9382550335570472e-05,
      "loss": 2.6738,
      "step": 23
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.935570469798658e-05,
      "loss": 2.6895,
      "step": 24
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9328859060402687e-05,
      "loss": 2.668,
      "step": 25
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9302013422818794e-05,
      "loss": 2.6699,
      "step": 26
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9275167785234902e-05,
      "loss": 2.625,
      "step": 27
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.924832214765101e-05,
      "loss": 2.623,
      "step": 28
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9221476510067117e-05,
      "loss": 2.6055,
      "step": 29
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.919463087248322e-05,
      "loss": 2.5723,
      "step": 30
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.916778523489933e-05,
      "loss": 2.5762,
      "step": 31
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.914093959731544e-05,
      "loss": 2.543,
      "step": 32
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9114093959731546e-05,
      "loss": 2.5742,
      "step": 33
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9087248322147654e-05,
      "loss": 2.5508,
      "step": 34
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9060402684563758e-05,
      "loss": 2.541,
      "step": 35
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.903355704697987e-05,
      "loss": 2.5332,
      "step": 36
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9006711409395976e-05,
      "loss": 2.5449,
      "step": 37
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.8979865771812083e-05,
      "loss": 2.4922,
      "step": 38
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.895302013422819e-05,
      "loss": 2.5156,
      "step": 39
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8926174496644295e-05,
      "loss": 2.502,
      "step": 40
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8899328859060406e-05,
      "loss": 2.4941,
      "step": 41
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8872483221476513e-05,
      "loss": 2.4785,
      "step": 42
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.884563758389262e-05,
      "loss": 2.4805,
      "step": 43
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8818791946308724e-05,
      "loss": 2.4805,
      "step": 44
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8791946308724832e-05,
      "loss": 2.4668,
      "step": 45
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8765100671140943e-05,
      "loss": 2.4805,
      "step": 46
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.873825503355705e-05,
      "loss": 2.4531,
      "step": 47
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8711409395973157e-05,
      "loss": 2.4375,
      "step": 48
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.868456375838926e-05,
      "loss": 2.4434,
      "step": 49
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.865771812080537e-05,
      "loss": 2.4258,
      "step": 50
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.863087248322148e-05,
      "loss": 2.4453,
      "step": 51
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8604026845637587e-05,
      "loss": 2.4297,
      "step": 52
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.857718120805369e-05,
      "loss": 2.4316,
      "step": 53
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.85503355704698e-05,
      "loss": 2.4121,
      "step": 54
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8523489932885906e-05,
      "loss": 2.4258,
      "step": 55
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8496644295302017e-05,
      "loss": 2.4121,
      "step": 56
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8469798657718124e-05,
      "loss": 2.3984,
      "step": 57
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8442953020134228e-05,
      "loss": 2.4082,
      "step": 58
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8416107382550336e-05,
      "loss": 2.3945,
      "step": 59
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8389261744966443e-05,
      "loss": 2.4277,
      "step": 60
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8362416107382554e-05,
      "loss": 2.3809,
      "step": 61
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.833557046979866e-05,
      "loss": 2.3945,
      "step": 62
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8308724832214765e-05,
      "loss": 2.3691,
      "step": 63
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8281879194630873e-05,
      "loss": 2.3809,
      "step": 64
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.825503355704698e-05,
      "loss": 2.375,
      "step": 65
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.822818791946309e-05,
      "loss": 2.3711,
      "step": 66
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8201342281879195e-05,
      "loss": 2.3711,
      "step": 67
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8174496644295302e-05,
      "loss": 2.3711,
      "step": 68
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.814765100671141e-05,
      "loss": 2.377,
      "step": 69
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8120805369127517e-05,
      "loss": 2.3535,
      "step": 70
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8093959731543628e-05,
      "loss": 2.3359,
      "step": 71
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8067114093959732e-05,
      "loss": 2.3613,
      "step": 72
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.804026845637584e-05,
      "loss": 2.3574,
      "step": 73
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.8013422818791947e-05,
      "loss": 2.3516,
      "step": 74
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7986577181208054e-05,
      "loss": 2.3594,
      "step": 75
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.795973154362416e-05,
      "loss": 2.3359,
      "step": 76
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.793288590604027e-05,
      "loss": 2.3438,
      "step": 77
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7906040268456376e-05,
      "loss": 2.3379,
      "step": 78
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7879194630872484e-05,
      "loss": 2.3359,
      "step": 79
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7852348993288595e-05,
      "loss": 2.3496,
      "step": 80
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.78255033557047e-05,
      "loss": 2.3281,
      "step": 81
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7798657718120806e-05,
      "loss": 2.3359,
      "step": 82
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.7771812080536913e-05,
      "loss": 2.3457,
      "step": 83
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.774496644295302e-05,
      "loss": 2.3223,
      "step": 84
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.771812080536913e-05,
      "loss": 2.3438,
      "step": 85
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.7691275167785236e-05,
      "loss": 2.3242,
      "step": 86
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.7664429530201343e-05,
      "loss": 2.3379,
      "step": 87
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.763758389261745e-05,
      "loss": 2.3438,
      "step": 88
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7610738255033558e-05,
      "loss": 2.3145,
      "step": 89
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7583892617449665e-05,
      "loss": 2.2949,
      "step": 90
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7557046979865773e-05,
      "loss": 2.3066,
      "step": 91
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.753020134228188e-05,
      "loss": 2.3027,
      "step": 92
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.7503355704697988e-05,
      "loss": 2.2949,
      "step": 93
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7476510067114095e-05,
      "loss": 2.3086,
      "step": 94
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7449664429530202e-05,
      "loss": 2.3086,
      "step": 95
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.742281879194631e-05,
      "loss": 2.3066,
      "step": 96
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7395973154362417e-05,
      "loss": 2.2793,
      "step": 97
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7369127516778525e-05,
      "loss": 2.3008,
      "step": 98
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7342281879194632e-05,
      "loss": 2.2949,
      "step": 99
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.731543624161074e-05,
      "loss": 2.3086,
      "step": 100
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7288590604026847e-05,
      "loss": 2.2988,
      "step": 101
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7261744966442954e-05,
      "loss": 2.2715,
      "step": 102
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7234899328859062e-05,
      "loss": 2.2949,
      "step": 103
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.720805369127517e-05,
      "loss": 2.2793,
      "step": 104
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7181208053691277e-05,
      "loss": 2.2637,
      "step": 105
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7154362416107384e-05,
      "loss": 2.2773,
      "step": 106
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.712751677852349e-05,
      "loss": 2.293,
      "step": 107
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.71006711409396e-05,
      "loss": 2.2793,
      "step": 108
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7073825503355706e-05,
      "loss": 2.2773,
      "step": 109
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7046979865771814e-05,
      "loss": 2.3027,
      "step": 110
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.702013422818792e-05,
      "loss": 2.2676,
      "step": 111
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.699328859060403e-05,
      "loss": 2.2852,
      "step": 112
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6966442953020136e-05,
      "loss": 2.2715,
      "step": 113
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6939597315436243e-05,
      "loss": 2.2559,
      "step": 114
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.691275167785235e-05,
      "loss": 2.2812,
      "step": 115
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6885906040268458e-05,
      "loss": 2.2676,
      "step": 116
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6859060402684565e-05,
      "loss": 2.2832,
      "step": 117
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6832214765100673e-05,
      "loss": 2.2617,
      "step": 118
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.680536912751678e-05,
      "loss": 2.2656,
      "step": 119
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6778523489932888e-05,
      "loss": 2.2578,
      "step": 120
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6751677852348995e-05,
      "loss": 2.2383,
      "step": 121
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6724832214765103e-05,
      "loss": 2.2441,
      "step": 122
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.669798657718121e-05,
      "loss": 2.2539,
      "step": 123
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6671140939597317e-05,
      "loss": 2.2441,
      "step": 124
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6644295302013425e-05,
      "loss": 2.2363,
      "step": 125
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6617449664429532e-05,
      "loss": 2.2539,
      "step": 126
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.659060402684564e-05,
      "loss": 2.2539,
      "step": 127
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6563758389261747e-05,
      "loss": 2.248,
      "step": 128
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6536912751677854e-05,
      "loss": 2.2402,
      "step": 129
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6510067114093962e-05,
      "loss": 2.2578,
      "step": 130
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.648322147651007e-05,
      "loss": 2.248,
      "step": 131
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6456375838926177e-05,
      "loss": 2.2793,
      "step": 132
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6429530201342284e-05,
      "loss": 2.2227,
      "step": 133
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.640268456375839e-05,
      "loss": 2.2422,
      "step": 134
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.63758389261745e-05,
      "loss": 2.2402,
      "step": 135
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6348993288590603e-05,
      "loss": 2.2461,
      "step": 136
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6322147651006714e-05,
      "loss": 2.2246,
      "step": 137
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.629530201342282e-05,
      "loss": 2.2363,
      "step": 138
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.626845637583893e-05,
      "loss": 2.2285,
      "step": 139
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6241610738255036e-05,
      "loss": 2.2461,
      "step": 140
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.621476510067114e-05,
      "loss": 2.2129,
      "step": 141
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.618791946308725e-05,
      "loss": 2.2246,
      "step": 142
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6161073825503358e-05,
      "loss": 2.2148,
      "step": 143
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.6134228187919466e-05,
      "loss": 2.2148,
      "step": 144
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.610738255033557e-05,
      "loss": 2.2207,
      "step": 145
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.6080536912751677e-05,
      "loss": 2.1973,
      "step": 146
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.6053691275167788e-05,
      "loss": 2.2109,
      "step": 147
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.6026845637583895e-05,
      "loss": 2.2266,
      "step": 148
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 2.2227,
      "step": 149
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5973154362416107e-05,
      "loss": 2.209,
      "step": 150
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5946308724832214e-05,
      "loss": 2.207,
      "step": 151
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5919463087248325e-05,
      "loss": 2.1934,
      "step": 152
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5892617449664432e-05,
      "loss": 2.2129,
      "step": 153
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.586577181208054e-05,
      "loss": 2.207,
      "step": 154
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5838926174496644e-05,
      "loss": 2.2324,
      "step": 155
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.581208053691275e-05,
      "loss": 2.2031,
      "step": 156
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5785234899328862e-05,
      "loss": 2.2031,
      "step": 157
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.575838926174497e-05,
      "loss": 2.1992,
      "step": 158
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5731543624161073e-05,
      "loss": 2.2031,
      "step": 159
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.570469798657718e-05,
      "loss": 2.1836,
      "step": 160
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.5677852348993288e-05,
      "loss": 2.2266,
      "step": 161
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.56510067114094e-05,
      "loss": 2.2129,
      "step": 162
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.5624161073825506e-05,
      "loss": 2.1973,
      "step": 163
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.559731543624161e-05,
      "loss": 2.2109,
      "step": 164
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.5570469798657718e-05,
      "loss": 2.1934,
      "step": 165
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.554362416107383e-05,
      "loss": 2.209,
      "step": 166
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.5516778523489936e-05,
      "loss": 2.1992,
      "step": 167
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.548993288590604e-05,
      "loss": 2.1816,
      "step": 168
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.5463087248322148e-05,
      "loss": 2.2109,
      "step": 169
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.5436241610738255e-05,
      "loss": 2.1992,
      "step": 170
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.5409395973154366e-05,
      "loss": 2.1699,
      "step": 171
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.5382550335570473e-05,
      "loss": 2.1777,
      "step": 172
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.5355704697986577e-05,
      "loss": 2.2148,
      "step": 173
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.5328859060402685e-05,
      "loss": 2.1953,
      "step": 174
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.5302013422818792e-05,
      "loss": 2.168,
      "step": 175
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.5275167785234903e-05,
      "loss": 2.1797,
      "step": 176
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.5248322147651009e-05,
      "loss": 2.2109,
      "step": 177
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.5221476510067114e-05,
      "loss": 2.1895,
      "step": 178
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.5194630872483223e-05,
      "loss": 2.1816,
      "step": 179
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.516778523489933e-05,
      "loss": 2.1621,
      "step": 180
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.5140939597315438e-05,
      "loss": 2.1816,
      "step": 181
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.5114093959731544e-05,
      "loss": 2.1777,
      "step": 182
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.5087248322147651e-05,
      "loss": 2.1777,
      "step": 183
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.506040268456376e-05,
      "loss": 2.1738,
      "step": 184
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.5033557046979868e-05,
      "loss": 2.1836,
      "step": 185
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.5006711409395975e-05,
      "loss": 2.1797,
      "step": 186
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.4979865771812081e-05,
      "loss": 2.1738,
      "step": 187
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.4953020134228188e-05,
      "loss": 2.1914,
      "step": 188
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.4926174496644297e-05,
      "loss": 2.1719,
      "step": 189
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.4899328859060405e-05,
      "loss": 2.1777,
      "step": 190
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.487248322147651e-05,
      "loss": 2.1504,
      "step": 191
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.4845637583892618e-05,
      "loss": 2.1992,
      "step": 192
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.4818791946308725e-05,
      "loss": 2.1855,
      "step": 193
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.4791946308724835e-05,
      "loss": 2.1777,
      "step": 194
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.4765100671140942e-05,
      "loss": 2.1504,
      "step": 195
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.4738255033557048e-05,
      "loss": 2.168,
      "step": 196
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.4711409395973155e-05,
      "loss": 2.168,
      "step": 197
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.4684563758389262e-05,
      "loss": 2.1777,
      "step": 198
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.4657718120805372e-05,
      "loss": 2.1621,
      "step": 199
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.4630872483221479e-05,
      "loss": 2.1758,
      "step": 200
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.4604026845637585e-05,
      "loss": 2.1719,
      "step": 201
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.4577181208053692e-05,
      "loss": 2.1777,
      "step": 202
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.45503355704698e-05,
      "loss": 2.1445,
      "step": 203
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.4523489932885909e-05,
      "loss": 2.1562,
      "step": 204
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.4496644295302014e-05,
      "loss": 2.1758,
      "step": 205
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.4469798657718122e-05,
      "loss": 2.1738,
      "step": 206
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.444295302013423e-05,
      "loss": 2.1562,
      "step": 207
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.4416107382550337e-05,
      "loss": 2.1758,
      "step": 208
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.4389261744966446e-05,
      "loss": 2.1621,
      "step": 209
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.4362416107382551e-05,
      "loss": 2.166,
      "step": 210
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.4335570469798659e-05,
      "loss": 2.1582,
      "step": 211
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.4308724832214766e-05,
      "loss": 2.1582,
      "step": 212
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.4281879194630874e-05,
      "loss": 2.1562,
      "step": 213
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.425503355704698e-05,
      "loss": 2.1641,
      "step": 214
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.4228187919463088e-05,
      "loss": 2.1777,
      "step": 215
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.4201342281879196e-05,
      "loss": 2.1426,
      "step": 216
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.4174496644295303e-05,
      "loss": 2.1504,
      "step": 217
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.414765100671141e-05,
      "loss": 2.1641,
      "step": 218
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.4120805369127516e-05,
      "loss": 2.1484,
      "step": 219
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.4093959731543626e-05,
      "loss": 2.1484,
      "step": 220
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.4067114093959733e-05,
      "loss": 2.1289,
      "step": 221
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.404026845637584e-05,
      "loss": 2.1426,
      "step": 222
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.4013422818791948e-05,
      "loss": 2.1367,
      "step": 223
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.3986577181208053e-05,
      "loss": 2.168,
      "step": 224
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.3959731543624163e-05,
      "loss": 2.123,
      "step": 225
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.393288590604027e-05,
      "loss": 2.1484,
      "step": 226
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.3906040268456377e-05,
      "loss": 2.1641,
      "step": 227
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.3879194630872483e-05,
      "loss": 2.1367,
      "step": 228
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.385234899328859e-05,
      "loss": 2.1543,
      "step": 229
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.38255033557047e-05,
      "loss": 2.166,
      "step": 230
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.3798657718120807e-05,
      "loss": 2.1094,
      "step": 231
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.3771812080536914e-05,
      "loss": 2.1289,
      "step": 232
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.374496644295302e-05,
      "loss": 2.1484,
      "step": 233
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.3718120805369128e-05,
      "loss": 2.1348,
      "step": 234
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.3691275167785237e-05,
      "loss": 2.125,
      "step": 235
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.3664429530201344e-05,
      "loss": 2.1426,
      "step": 236
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.363758389261745e-05,
      "loss": 2.1504,
      "step": 237
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.3610738255033557e-05,
      "loss": 2.1328,
      "step": 238
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.3583892617449665e-05,
      "loss": 2.1406,
      "step": 239
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.3557046979865774e-05,
      "loss": 2.1504,
      "step": 240
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.3530201342281881e-05,
      "loss": 2.127,
      "step": 241
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.3503355704697987e-05,
      "loss": 2.1211,
      "step": 242
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.3476510067114094e-05,
      "loss": 2.1367,
      "step": 243
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.3449664429530202e-05,
      "loss": 2.1406,
      "step": 244
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.342281879194631e-05,
      "loss": 2.125,
      "step": 245
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.3395973154362418e-05,
      "loss": 2.1445,
      "step": 246
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.3369127516778524e-05,
      "loss": 2.1328,
      "step": 247
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.3342281879194631e-05,
      "loss": 2.1328,
      "step": 248
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.331543624161074e-05,
      "loss": 2.127,
      "step": 249
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.3288590604026848e-05,
      "loss": 2.1309,
      "step": 250
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.3261744966442954e-05,
      "loss": 2.1426,
      "step": 251
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.3234899328859061e-05,
      "loss": 2.1289,
      "step": 252
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.3208053691275168e-05,
      "loss": 2.1387,
      "step": 253
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.3181208053691278e-05,
      "loss": 2.1172,
      "step": 254
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.3154362416107385e-05,
      "loss": 2.1309,
      "step": 255
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.312751677852349e-05,
      "loss": 2.1309,
      "step": 256
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.3100671140939598e-05,
      "loss": 2.1074,
      "step": 257
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.3073825503355706e-05,
      "loss": 2.0977,
      "step": 258
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.3046979865771815e-05,
      "loss": 2.1133,
      "step": 259
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.302013422818792e-05,
      "loss": 2.123,
      "step": 260
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.2993288590604028e-05,
      "loss": 2.127,
      "step": 261
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.2966442953020135e-05,
      "loss": 2.1191,
      "step": 262
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.2939597315436243e-05,
      "loss": 2.0938,
      "step": 263
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.2912751677852352e-05,
      "loss": 2.1367,
      "step": 264
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.2885906040268457e-05,
      "loss": 2.1094,
      "step": 265
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.2859060402684565e-05,
      "loss": 2.1035,
      "step": 266
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.2832214765100672e-05,
      "loss": 2.1074,
      "step": 267
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.280536912751678e-05,
      "loss": 2.1348,
      "step": 268
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.2778523489932889e-05,
      "loss": 2.1309,
      "step": 269
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.2751677852348994e-05,
      "loss": 2.1055,
      "step": 270
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.2724832214765102e-05,
      "loss": 2.125,
      "step": 271
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.269798657718121e-05,
      "loss": 2.0957,
      "step": 272
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.2671140939597317e-05,
      "loss": 2.1309,
      "step": 273
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.2644295302013422e-05,
      "loss": 2.1191,
      "step": 274
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.2617449664429532e-05,
      "loss": 2.1211,
      "step": 275
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.2590604026845639e-05,
      "loss": 2.1348,
      "step": 276
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.2563758389261746e-05,
      "loss": 2.1191,
      "step": 277
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.2536912751677854e-05,
      "loss": 2.1035,
      "step": 278
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.251006711409396e-05,
      "loss": 2.1094,
      "step": 279
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.2483221476510069e-05,
      "loss": 2.1133,
      "step": 280
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.2456375838926176e-05,
      "loss": 2.1191,
      "step": 281
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.2429530201342283e-05,
      "loss": 2.082,
      "step": 282
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.2402684563758389e-05,
      "loss": 2.1133,
      "step": 283
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.2375838926174497e-05,
      "loss": 2.1113,
      "step": 284
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.2348993288590606e-05,
      "loss": 2.0996,
      "step": 285
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.2322147651006713e-05,
      "loss": 2.1094,
      "step": 286
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.229530201342282e-05,
      "loss": 2.0918,
      "step": 287
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.2268456375838926e-05,
      "loss": 2.0938,
      "step": 288
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.2241610738255034e-05,
      "loss": 2.1152,
      "step": 289
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.2214765100671143e-05,
      "loss": 2.1016,
      "step": 290
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.218791946308725e-05,
      "loss": 2.1152,
      "step": 291
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.2161073825503358e-05,
      "loss": 2.1016,
      "step": 292
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.2134228187919463e-05,
      "loss": 2.1055,
      "step": 293
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.210738255033557e-05,
      "loss": 2.0938,
      "step": 294
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.208053691275168e-05,
      "loss": 2.1211,
      "step": 295
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.2053691275167787e-05,
      "loss": 2.1016,
      "step": 296
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.2026845637583893e-05,
      "loss": 2.1152,
      "step": 297
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.2e-05,
      "loss": 2.0957,
      "step": 298
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.1973154362416108e-05,
      "loss": 2.0938,
      "step": 299
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.1946308724832217e-05,
      "loss": 2.0859,
      "step": 300
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.1919463087248324e-05,
      "loss": 2.0879,
      "step": 301
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.189261744966443e-05,
      "loss": 2.082,
      "step": 302
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.1865771812080537e-05,
      "loss": 2.1074,
      "step": 303
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.1838926174496645e-05,
      "loss": 2.1016,
      "step": 304
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.1812080536912754e-05,
      "loss": 2.0957,
      "step": 305
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.178523489932886e-05,
      "loss": 2.0684,
      "step": 306
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.1758389261744967e-05,
      "loss": 2.0898,
      "step": 307
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.1731543624161074e-05,
      "loss": 2.0977,
      "step": 308
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.1704697986577182e-05,
      "loss": 2.1074,
      "step": 309
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.1677852348993291e-05,
      "loss": 2.125,
      "step": 310
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.1651006711409397e-05,
      "loss": 2.1016,
      "step": 311
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.1624161073825504e-05,
      "loss": 2.1035,
      "step": 312
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.1597315436241611e-05,
      "loss": 2.084,
      "step": 313
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.1570469798657719e-05,
      "loss": 2.0762,
      "step": 314
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.1543624161073828e-05,
      "loss": 2.1133,
      "step": 315
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.1516778523489934e-05,
      "loss": 2.1016,
      "step": 316
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.1489932885906041e-05,
      "loss": 2.0742,
      "step": 317
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.1463087248322149e-05,
      "loss": 2.0771,
      "step": 318
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.1436241610738256e-05,
      "loss": 2.0879,
      "step": 319
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.1409395973154362e-05,
      "loss": 2.0957,
      "step": 320
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.138255033557047e-05,
      "loss": 2.0879,
      "step": 321
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.1355704697986578e-05,
      "loss": 2.082,
      "step": 322
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.1328859060402686e-05,
      "loss": 2.0996,
      "step": 323
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.1302013422818795e-05,
      "loss": 2.0801,
      "step": 324
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.1275167785234899e-05,
      "loss": 2.1113,
      "step": 325
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.1248322147651008e-05,
      "loss": 2.0996,
      "step": 326
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.1221476510067115e-05,
      "loss": 2.0762,
      "step": 327
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.1194630872483223e-05,
      "loss": 2.1094,
      "step": 328
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.1167785234899328e-05,
      "loss": 2.082,
      "step": 329
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.1140939597315436e-05,
      "loss": 2.0527,
      "step": 330
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.1114093959731545e-05,
      "loss": 2.0859,
      "step": 331
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.1087248322147652e-05,
      "loss": 2.0508,
      "step": 332
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.106040268456376e-05,
      "loss": 2.0977,
      "step": 333
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.1033557046979865e-05,
      "loss": 2.0889,
      "step": 334
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.1006711409395975e-05,
      "loss": 2.082,
      "step": 335
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.0979865771812082e-05,
      "loss": 2.0742,
      "step": 336
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.095302013422819e-05,
      "loss": 2.0879,
      "step": 337
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.0926174496644297e-05,
      "loss": 2.082,
      "step": 338
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.0899328859060403e-05,
      "loss": 2.0879,
      "step": 339
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.0872483221476512e-05,
      "loss": 2.082,
      "step": 340
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.0845637583892619e-05,
      "loss": 2.082,
      "step": 341
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.0818791946308726e-05,
      "loss": 2.0547,
      "step": 342
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.0791946308724832e-05,
      "loss": 2.0645,
      "step": 343
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.076510067114094e-05,
      "loss": 2.0703,
      "step": 344
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.0738255033557049e-05,
      "loss": 2.084,
      "step": 345
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.0711409395973156e-05,
      "loss": 2.0762,
      "step": 346
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.0684563758389264e-05,
      "loss": 2.0723,
      "step": 347
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.065771812080537e-05,
      "loss": 2.0723,
      "step": 348
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.0630872483221477e-05,
      "loss": 2.0723,
      "step": 349
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0604026845637586e-05,
      "loss": 2.084,
      "step": 350
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0577181208053693e-05,
      "loss": 2.0781,
      "step": 351
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0550335570469799e-05,
      "loss": 2.0742,
      "step": 352
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0523489932885906e-05,
      "loss": 2.0869,
      "step": 353
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0496644295302014e-05,
      "loss": 2.0625,
      "step": 354
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0469798657718123e-05,
      "loss": 2.0859,
      "step": 355
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.044295302013423e-05,
      "loss": 2.0801,
      "step": 356
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0416107382550336e-05,
      "loss": 2.0566,
      "step": 357
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0389261744966443e-05,
      "loss": 2.0859,
      "step": 358
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.036241610738255e-05,
      "loss": 2.084,
      "step": 359
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.033557046979866e-05,
      "loss": 2.0762,
      "step": 360
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.0308724832214767e-05,
      "loss": 2.0703,
      "step": 361
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.0281879194630873e-05,
      "loss": 2.0762,
      "step": 362
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.025503355704698e-05,
      "loss": 2.0762,
      "step": 363
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.0228187919463088e-05,
      "loss": 2.0664,
      "step": 364
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.0201342281879197e-05,
      "loss": 2.0586,
      "step": 365
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.0174496644295303e-05,
      "loss": 2.0664,
      "step": 366
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.014765100671141e-05,
      "loss": 2.0684,
      "step": 367
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.0120805369127517e-05,
      "loss": 2.0605,
      "step": 368
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.0093959731543625e-05,
      "loss": 2.0625,
      "step": 369
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.0067114093959734e-05,
      "loss": 2.0859,
      "step": 370
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.004026845637584e-05,
      "loss": 2.0527,
      "step": 371
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.0013422818791947e-05,
      "loss": 2.0703,
      "step": 372
    },
    {
      "epoch": 2.5,
      "learning_rate": 9.986577181208055e-06,
      "loss": 2.0684,
      "step": 373
    },
    {
      "epoch": 2.51,
      "learning_rate": 9.959731543624162e-06,
      "loss": 2.0488,
      "step": 374
    },
    {
      "epoch": 2.52,
      "learning_rate": 9.93288590604027e-06,
      "loss": 2.0762,
      "step": 375
    },
    {
      "epoch": 2.52,
      "learning_rate": 9.906040268456377e-06,
      "loss": 2.0508,
      "step": 376
    },
    {
      "epoch": 2.53,
      "learning_rate": 9.879194630872484e-06,
      "loss": 2.0605,
      "step": 377
    },
    {
      "epoch": 2.54,
      "learning_rate": 9.852348993288592e-06,
      "loss": 2.0508,
      "step": 378
    },
    {
      "epoch": 2.54,
      "learning_rate": 9.825503355704699e-06,
      "loss": 2.0508,
      "step": 379
    },
    {
      "epoch": 2.55,
      "learning_rate": 9.798657718120806e-06,
      "loss": 2.0664,
      "step": 380
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.771812080536914e-06,
      "loss": 2.0664,
      "step": 381
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.744966442953021e-06,
      "loss": 2.0859,
      "step": 382
    },
    {
      "epoch": 2.57,
      "learning_rate": 9.718120805369129e-06,
      "loss": 2.0859,
      "step": 383
    },
    {
      "epoch": 2.58,
      "learning_rate": 9.691275167785236e-06,
      "loss": 2.0566,
      "step": 384
    },
    {
      "epoch": 2.58,
      "learning_rate": 9.664429530201343e-06,
      "loss": 2.0703,
      "step": 385
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.637583892617451e-06,
      "loss": 2.0645,
      "step": 386
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.610738255033558e-06,
      "loss": 2.0645,
      "step": 387
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.583892617449666e-06,
      "loss": 2.0566,
      "step": 388
    },
    {
      "epoch": 2.61,
      "learning_rate": 9.557046979865773e-06,
      "loss": 2.0605,
      "step": 389
    },
    {
      "epoch": 2.62,
      "learning_rate": 9.530201342281879e-06,
      "loss": 2.0625,
      "step": 390
    },
    {
      "epoch": 2.62,
      "learning_rate": 9.503355704697988e-06,
      "loss": 2.0684,
      "step": 391
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.476510067114095e-06,
      "loss": 2.0566,
      "step": 392
    },
    {
      "epoch": 2.64,
      "learning_rate": 9.449664429530203e-06,
      "loss": 2.0547,
      "step": 393
    },
    {
      "epoch": 2.64,
      "learning_rate": 9.42281879194631e-06,
      "loss": 2.0547,
      "step": 394
    },
    {
      "epoch": 2.65,
      "learning_rate": 9.395973154362416e-06,
      "loss": 2.0742,
      "step": 395
    },
    {
      "epoch": 2.66,
      "learning_rate": 9.369127516778525e-06,
      "loss": 2.084,
      "step": 396
    },
    {
      "epoch": 2.66,
      "learning_rate": 9.34228187919463e-06,
      "loss": 2.0518,
      "step": 397
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.31543624161074e-06,
      "loss": 2.0439,
      "step": 398
    },
    {
      "epoch": 2.68,
      "learning_rate": 9.288590604026846e-06,
      "loss": 2.0645,
      "step": 399
    },
    {
      "epoch": 2.68,
      "learning_rate": 9.261744966442953e-06,
      "loss": 2.0605,
      "step": 400
    },
    {
      "epoch": 2.69,
      "learning_rate": 9.234899328859062e-06,
      "loss": 2.0664,
      "step": 401
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.208053691275168e-06,
      "loss": 2.0645,
      "step": 402
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.181208053691277e-06,
      "loss": 2.0527,
      "step": 403
    },
    {
      "epoch": 2.71,
      "learning_rate": 9.154362416107383e-06,
      "loss": 2.0605,
      "step": 404
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.12751677852349e-06,
      "loss": 2.0381,
      "step": 405
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.100671140939597e-06,
      "loss": 2.0352,
      "step": 406
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.073825503355705e-06,
      "loss": 2.0352,
      "step": 407
    },
    {
      "epoch": 2.74,
      "learning_rate": 9.046979865771814e-06,
      "loss": 2.0439,
      "step": 408
    },
    {
      "epoch": 2.74,
      "learning_rate": 9.02013422818792e-06,
      "loss": 2.0332,
      "step": 409
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.993288590604027e-06,
      "loss": 2.0566,
      "step": 410
    },
    {
      "epoch": 2.76,
      "learning_rate": 8.966442953020134e-06,
      "loss": 2.0625,
      "step": 411
    },
    {
      "epoch": 2.77,
      "learning_rate": 8.939597315436242e-06,
      "loss": 2.0488,
      "step": 412
    },
    {
      "epoch": 2.77,
      "learning_rate": 8.91275167785235e-06,
      "loss": 2.0488,
      "step": 413
    },
    {
      "epoch": 2.78,
      "learning_rate": 8.885906040268457e-06,
      "loss": 2.0664,
      "step": 414
    },
    {
      "epoch": 2.79,
      "learning_rate": 8.859060402684566e-06,
      "loss": 2.0645,
      "step": 415
    },
    {
      "epoch": 2.79,
      "learning_rate": 8.832214765100672e-06,
      "loss": 2.0625,
      "step": 416
    },
    {
      "epoch": 2.8,
      "learning_rate": 8.805369127516779e-06,
      "loss": 2.0547,
      "step": 417
    },
    {
      "epoch": 2.81,
      "learning_rate": 8.778523489932886e-06,
      "loss": 2.0645,
      "step": 418
    },
    {
      "epoch": 2.81,
      "learning_rate": 8.751677852348994e-06,
      "loss": 2.0459,
      "step": 419
    },
    {
      "epoch": 2.82,
      "learning_rate": 8.724832214765101e-06,
      "loss": 2.043,
      "step": 420
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.697986577181209e-06,
      "loss": 2.0645,
      "step": 421
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.671140939597316e-06,
      "loss": 2.0605,
      "step": 422
    },
    {
      "epoch": 2.84,
      "learning_rate": 8.644295302013423e-06,
      "loss": 2.0684,
      "step": 423
    },
    {
      "epoch": 2.85,
      "learning_rate": 8.617449664429531e-06,
      "loss": 2.0508,
      "step": 424
    },
    {
      "epoch": 2.85,
      "learning_rate": 8.590604026845638e-06,
      "loss": 2.0703,
      "step": 425
    },
    {
      "epoch": 2.86,
      "learning_rate": 8.563758389261746e-06,
      "loss": 2.0488,
      "step": 426
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.536912751677853e-06,
      "loss": 2.0381,
      "step": 427
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.51006711409396e-06,
      "loss": 2.04,
      "step": 428
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.483221476510068e-06,
      "loss": 2.0566,
      "step": 429
    },
    {
      "epoch": 2.89,
      "learning_rate": 8.456375838926175e-06,
      "loss": 2.0547,
      "step": 430
    },
    {
      "epoch": 2.89,
      "learning_rate": 8.429530201342283e-06,
      "loss": 2.0449,
      "step": 431
    },
    {
      "epoch": 2.9,
      "learning_rate": 8.40268456375839e-06,
      "loss": 2.0352,
      "step": 432
    },
    {
      "epoch": 2.91,
      "learning_rate": 8.375838926174498e-06,
      "loss": 2.0605,
      "step": 433
    },
    {
      "epoch": 2.91,
      "learning_rate": 8.348993288590605e-06,
      "loss": 2.0684,
      "step": 434
    },
    {
      "epoch": 2.92,
      "learning_rate": 8.322147651006712e-06,
      "loss": 2.0508,
      "step": 435
    },
    {
      "epoch": 2.93,
      "learning_rate": 8.29530201342282e-06,
      "loss": 2.0664,
      "step": 436
    },
    {
      "epoch": 2.93,
      "learning_rate": 8.268456375838927e-06,
      "loss": 2.04,
      "step": 437
    },
    {
      "epoch": 2.94,
      "learning_rate": 8.241610738255035e-06,
      "loss": 2.0371,
      "step": 438
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.214765100671142e-06,
      "loss": 2.0361,
      "step": 439
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.18791946308725e-06,
      "loss": 2.0645,
      "step": 440
    },
    {
      "epoch": 2.96,
      "learning_rate": 8.161073825503357e-06,
      "loss": 2.0527,
      "step": 441
    },
    {
      "epoch": 2.97,
      "learning_rate": 8.134228187919464e-06,
      "loss": 2.0518,
      "step": 442
    },
    {
      "epoch": 2.97,
      "learning_rate": 8.10738255033557e-06,
      "loss": 2.0508,
      "step": 443
    },
    {
      "epoch": 2.98,
      "learning_rate": 8.080536912751679e-06,
      "loss": 2.0566,
      "step": 444
    },
    {
      "epoch": 2.99,
      "learning_rate": 8.053691275167785e-06,
      "loss": 2.042,
      "step": 445
    },
    {
      "epoch": 2.99,
      "learning_rate": 8.026845637583894e-06,
      "loss": 2.0527,
      "step": 446
    },
    {
      "epoch": 3.0,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.0342,
      "step": 447
    },
    {
      "epoch": 3.01,
      "learning_rate": 7.973154362416107e-06,
      "loss": 2.0391,
      "step": 448
    },
    {
      "epoch": 3.01,
      "learning_rate": 7.946308724832216e-06,
      "loss": 2.0361,
      "step": 449
    },
    {
      "epoch": 3.02,
      "learning_rate": 7.919463087248322e-06,
      "loss": 2.0645,
      "step": 450
    },
    {
      "epoch": 3.03,
      "learning_rate": 7.892617449664431e-06,
      "loss": 2.041,
      "step": 451
    },
    {
      "epoch": 3.03,
      "learning_rate": 7.865771812080537e-06,
      "loss": 2.0312,
      "step": 452
    },
    {
      "epoch": 3.04,
      "learning_rate": 7.838926174496644e-06,
      "loss": 2.0371,
      "step": 453
    },
    {
      "epoch": 3.05,
      "learning_rate": 7.812080536912753e-06,
      "loss": 2.0566,
      "step": 454
    },
    {
      "epoch": 3.05,
      "learning_rate": 7.785234899328859e-06,
      "loss": 2.0273,
      "step": 455
    },
    {
      "epoch": 3.06,
      "learning_rate": 7.758389261744968e-06,
      "loss": 2.0352,
      "step": 456
    },
    {
      "epoch": 3.07,
      "learning_rate": 7.731543624161074e-06,
      "loss": 2.0361,
      "step": 457
    },
    {
      "epoch": 3.07,
      "learning_rate": 7.704697986577183e-06,
      "loss": 2.0146,
      "step": 458
    },
    {
      "epoch": 3.08,
      "learning_rate": 7.677852348993289e-06,
      "loss": 2.0547,
      "step": 459
    },
    {
      "epoch": 3.09,
      "learning_rate": 7.651006711409396e-06,
      "loss": 2.0439,
      "step": 460
    },
    {
      "epoch": 3.09,
      "learning_rate": 7.624161073825504e-06,
      "loss": 2.0361,
      "step": 461
    },
    {
      "epoch": 3.1,
      "learning_rate": 7.597315436241612e-06,
      "loss": 2.0527,
      "step": 462
    },
    {
      "epoch": 3.11,
      "learning_rate": 7.570469798657719e-06,
      "loss": 2.0508,
      "step": 463
    },
    {
      "epoch": 3.11,
      "learning_rate": 7.543624161073826e-06,
      "loss": 2.0547,
      "step": 464
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.516778523489934e-06,
      "loss": 2.0566,
      "step": 465
    },
    {
      "epoch": 3.13,
      "learning_rate": 7.4899328859060405e-06,
      "loss": 2.0547,
      "step": 466
    },
    {
      "epoch": 3.13,
      "learning_rate": 7.463087248322149e-06,
      "loss": 2.0234,
      "step": 467
    },
    {
      "epoch": 3.14,
      "learning_rate": 7.436241610738255e-06,
      "loss": 2.043,
      "step": 468
    },
    {
      "epoch": 3.15,
      "learning_rate": 7.409395973154363e-06,
      "loss": 2.0371,
      "step": 469
    },
    {
      "epoch": 3.15,
      "learning_rate": 7.382550335570471e-06,
      "loss": 2.0352,
      "step": 470
    },
    {
      "epoch": 3.16,
      "learning_rate": 7.3557046979865775e-06,
      "loss": 2.043,
      "step": 471
    },
    {
      "epoch": 3.17,
      "learning_rate": 7.328859060402686e-06,
      "loss": 2.0537,
      "step": 472
    },
    {
      "epoch": 3.17,
      "learning_rate": 7.302013422818792e-06,
      "loss": 2.0449,
      "step": 473
    },
    {
      "epoch": 3.18,
      "learning_rate": 7.2751677852349e-06,
      "loss": 2.0273,
      "step": 474
    },
    {
      "epoch": 3.19,
      "learning_rate": 7.248322147651007e-06,
      "loss": 2.0322,
      "step": 475
    },
    {
      "epoch": 3.19,
      "learning_rate": 7.221476510067115e-06,
      "loss": 2.0381,
      "step": 476
    },
    {
      "epoch": 3.2,
      "learning_rate": 7.194630872483223e-06,
      "loss": 2.0381,
      "step": 477
    },
    {
      "epoch": 3.21,
      "learning_rate": 7.167785234899329e-06,
      "loss": 2.0439,
      "step": 478
    },
    {
      "epoch": 3.21,
      "learning_rate": 7.140939597315437e-06,
      "loss": 2.04,
      "step": 479
    },
    {
      "epoch": 3.22,
      "learning_rate": 7.114093959731544e-06,
      "loss": 2.0352,
      "step": 480
    },
    {
      "epoch": 3.23,
      "learning_rate": 7.087248322147652e-06,
      "loss": 2.04,
      "step": 481
    },
    {
      "epoch": 3.23,
      "learning_rate": 7.060402684563758e-06,
      "loss": 2.0117,
      "step": 482
    },
    {
      "epoch": 3.24,
      "learning_rate": 7.0335570469798665e-06,
      "loss": 2.0342,
      "step": 483
    },
    {
      "epoch": 3.25,
      "learning_rate": 7.006711409395974e-06,
      "loss": 2.0146,
      "step": 484
    },
    {
      "epoch": 3.26,
      "learning_rate": 6.979865771812081e-06,
      "loss": 2.0518,
      "step": 485
    },
    {
      "epoch": 3.26,
      "learning_rate": 6.953020134228189e-06,
| "loss": 2.0283, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 6.926174496644295e-06, | |
| "loss": 2.0322, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 6.8993288590604035e-06, | |
| "loss": 2.0508, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 6.87248322147651e-06, | |
| "loss": 2.0264, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 6.845637583892618e-06, | |
| "loss": 2.041, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 6.818791946308725e-06, | |
| "loss": 2.0264, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 6.791946308724832e-06, | |
| "loss": 2.0371, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 6.765100671140941e-06, | |
| "loss": 2.0371, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 6.738255033557047e-06, | |
| "loss": 2.042, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 6.711409395973155e-06, | |
| "loss": 2.0107, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 6.684563758389262e-06, | |
| "loss": 2.0176, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 6.65771812080537e-06, | |
| "loss": 2.0273, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 6.630872483221477e-06, | |
| "loss": 2.0469, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 6.604026845637584e-06, | |
| "loss": 2.0205, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 6.5771812080536925e-06, | |
| "loss": 2.0137, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 6.550335570469799e-06, | |
| "loss": 2.0352, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 6.523489932885907e-06, | |
| "loss": 2.0547, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 6.496644295302014e-06, | |
| "loss": 2.0137, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 6.469798657718121e-06, | |
| "loss": 2.0029, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 6.442953020134229e-06, | |
| "loss": 2.0176, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 6.416107382550336e-06, | |
| "loss": 2.0283, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 6.389261744966444e-06, | |
| "loss": 2.0488, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 6.362416107382551e-06, | |
| "loss": 2.0371, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 6.335570469798658e-06, | |
| "loss": 2.0107, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 6.308724832214766e-06, | |
| "loss": 2.0273, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 6.281879194630873e-06, | |
| "loss": 2.0508, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 6.25503355704698e-06, | |
| "loss": 2.0342, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 6.228187919463088e-06, | |
| "loss": 2.0586, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 6.2013422818791946e-06, | |
| "loss": 2.0107, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 6.174496644295303e-06, | |
| "loss": 2.0547, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 6.14765100671141e-06, | |
| "loss": 2.0273, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 6.120805369127517e-06, | |
| "loss": 2.0371, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 6.093959731543625e-06, | |
| "loss": 2.0098, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 6.067114093959732e-06, | |
| "loss": 2.0293, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 6.04026845637584e-06, | |
| "loss": 2.0186, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 6.0134228187919464e-06, | |
| "loss": 2.0146, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 5.986577181208054e-06, | |
| "loss": 2.0361, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 5.959731543624162e-06, | |
| "loss": 2.0117, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 5.932885906040269e-06, | |
| "loss": 2.0146, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 5.906040268456377e-06, | |
| "loss": 2.0264, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 5.8791946308724835e-06, | |
| "loss": 2.0264, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 5.852348993288591e-06, | |
| "loss": 2.0439, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 5.825503355704698e-06, | |
| "loss": 2.0283, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 5.798657718120806e-06, | |
| "loss": 2.0264, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 5.771812080536914e-06, | |
| "loss": 2.0273, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 5.7449664429530206e-06, | |
| "loss": 2.0254, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 5.718120805369128e-06, | |
| "loss": 2.0146, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 5.691275167785235e-06, | |
| "loss": 2.0273, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 5.664429530201343e-06, | |
| "loss": 2.0439, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 5.637583892617449e-06, | |
| "loss": 2.0293, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 5.610738255033558e-06, | |
| "loss": 2.0254, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 5.583892617449664e-06, | |
| "loss": 2.0283, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 5.5570469798657725e-06, | |
| "loss": 2.0088, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 5.53020134228188e-06, | |
| "loss": 2.0166, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 5.503355704697987e-06, | |
| "loss": 2.0283, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 5.476510067114095e-06, | |
| "loss": 2.002, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 5.449664429530201e-06, | |
| "loss": 2.0205, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 5.4228187919463095e-06, | |
| "loss": 2.0234, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 5.395973154362416e-06, | |
| "loss": 2.043, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 5.369127516778524e-06, | |
| "loss": 2.0127, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 5.342281879194632e-06, | |
| "loss": 2.041, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 5.315436241610738e-06, | |
| "loss": 2.0322, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 5.2885906040268466e-06, | |
| "loss": 2.0332, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 5.261744966442953e-06, | |
| "loss": 2.0244, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 5.234899328859061e-06, | |
| "loss": 2.0244, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 5.208053691275168e-06, | |
| "loss": 2.0195, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 5.181208053691275e-06, | |
| "loss": 2.0254, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 5.154362416107384e-06, | |
| "loss": 2.04, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 5.12751677852349e-06, | |
| "loss": 2.0244, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 5.1006711409395985e-06, | |
| "loss": 1.9902, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 5.073825503355705e-06, | |
| "loss": 2.0127, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 5.0469798657718124e-06, | |
| "loss": 2.0039, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 5.02013422818792e-06, | |
| "loss": 2.0225, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 4.993288590604027e-06, | |
| "loss": 2.0176, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 4.966442953020135e-06, | |
| "loss": 2.0107, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 4.939597315436242e-06, | |
| "loss": 2.0273, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 4.9127516778523495e-06, | |
| "loss": 2.0078, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 4.885906040268457e-06, | |
| "loss": 2.0361, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 4.859060402684564e-06, | |
| "loss": 2.0352, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 4.832214765100672e-06, | |
| "loss": 2.0381, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 4.805369127516779e-06, | |
| "loss": 2.0215, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 4.7785234899328866e-06, | |
| "loss": 2.0195, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 4.751677852348994e-06, | |
| "loss": 2.0107, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 4.724832214765101e-06, | |
| "loss": 2.0039, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 4.697986577181208e-06, | |
| "loss": 2.0186, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 4.671140939597315e-06, | |
| "loss": 2.0234, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 4.644295302013423e-06, | |
| "loss": 2.0234, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 4.617449664429531e-06, | |
| "loss": 2.0039, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 4.5906040268456384e-06, | |
| "loss": 2.0039, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 4.563758389261745e-06, | |
| "loss": 2.0215, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 4.536912751677852e-06, | |
| "loss": 2.0215, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 4.51006711409396e-06, | |
| "loss": 2.0146, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 4.483221476510067e-06, | |
| "loss": 2.0391, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 4.456375838926175e-06, | |
| "loss": 2.0137, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 4.429530201342283e-06, | |
| "loss": 2.0107, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 4.4026845637583895e-06, | |
| "loss": 2.0176, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 4.375838926174497e-06, | |
| "loss": 2.0273, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 4.348993288590604e-06, | |
| "loss": 2.0098, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 4.322147651006712e-06, | |
| "loss": 2.0215, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 4.295302013422819e-06, | |
| "loss": 2.0107, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 4.2684563758389265e-06, | |
| "loss": 2.0293, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 4.241610738255034e-06, | |
| "loss": 2.0312, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 4.214765100671141e-06, | |
| "loss": 2.0156, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 4.187919463087249e-06, | |
| "loss": 1.9922, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 4.161073825503356e-06, | |
| "loss": 2.0225, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 4.134228187919464e-06, | |
| "loss": 2.0391, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 4.107382550335571e-06, | |
| "loss": 2.0293, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 4.0805369127516784e-06, | |
| "loss": 2.0166, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 4.053691275167785e-06, | |
| "loss": 2.001, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 4.026845637583892e-06, | |
| "loss": 2.0215, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 2.0166, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.973154362416108e-06, | |
| "loss": 2.0088, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.9463087248322155e-06, | |
| "loss": 2.0156, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 3.919463087248322e-06, | |
| "loss": 2.0332, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.8926174496644295e-06, | |
| "loss": 1.9951, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.865771812080537e-06, | |
| "loss": 2.0215, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 3.838926174496644e-06, | |
| "loss": 2.0146, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.812080536912752e-06, | |
| "loss": 2.0332, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.7852348993288595e-06, | |
| "loss": 2.0449, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 3.758389261744967e-06, | |
| "loss": 2.0225, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.7315436241610744e-06, | |
| "loss": 2.001, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.7046979865771814e-06, | |
| "loss": 2.0146, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 3.6778523489932888e-06, | |
| "loss": 1.9932, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 3.651006711409396e-06, | |
| "loss": 2.0029, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 3.6241610738255036e-06, | |
| "loss": 2.0029, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 3.5973154362416114e-06, | |
| "loss": 2.0186, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 3.5704697986577184e-06, | |
| "loss": 2.0117, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 3.543624161073826e-06, | |
| "loss": 2.0186, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 3.5167785234899332e-06, | |
| "loss": 1.999, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 3.4899328859060407e-06, | |
| "loss": 2.0312, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 3.4630872483221476e-06, | |
| "loss": 2.0234, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 3.436241610738255e-06, | |
| "loss": 2.0225, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 3.4093959731543625e-06, | |
| "loss": 2.0322, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 3.3825503355704703e-06, | |
| "loss": 2.0186, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "learning_rate": 3.3557046979865777e-06, | |
| "loss": 2.001, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 3.328859060402685e-06, | |
| "loss": 1.9971, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 3.302013422818792e-06, | |
| "loss": 2.0117, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "learning_rate": 3.2751677852348995e-06, | |
| "loss": 1.9941, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 3.248322147651007e-06, | |
| "loss": 2.0127, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 3.2214765100671143e-06, | |
| "loss": 1.9932, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "learning_rate": 3.194630872483222e-06, | |
| "loss": 1.9932, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 3.167785234899329e-06, | |
| "loss": 2.0059, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 3.1409395973154366e-06, | |
| "loss": 1.9932, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "learning_rate": 3.114093959731544e-06, | |
| "loss": 2.0293, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 3.0872483221476514e-06, | |
| "loss": 2.0, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 3.0604026845637584e-06, | |
| "loss": 2.0156, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "learning_rate": 3.033557046979866e-06, | |
| "loss": 2.0303, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 3.0067114093959732e-06, | |
| "loss": 2.0186, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.979865771812081e-06, | |
| "loss": 2.0068, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.9530201342281885e-06, | |
| "loss": 2.0381, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "learning_rate": 2.9261744966442955e-06, | |
| "loss": 2.0088, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.899328859060403e-06, | |
| "loss": 1.9971, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.8724832214765103e-06, | |
| "loss": 2.0293, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 2.8456375838926177e-06, | |
| "loss": 1.9961, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.8187919463087247e-06, | |
| "loss": 2.0117, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.791946308724832e-06, | |
| "loss": 2.0068, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "learning_rate": 2.76510067114094e-06, | |
| "loss": 2.002, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.7382550335570473e-06, | |
| "loss": 2.001, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.7114093959731548e-06, | |
| "loss": 1.998, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "learning_rate": 2.684563758389262e-06, | |
| "loss": 2.0156, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.657718120805369e-06, | |
| "loss": 2.0322, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.6308724832214766e-06, | |
| "loss": 2.0156, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 2.604026845637584e-06, | |
| "loss": 2.001, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 2.577181208053692e-06, | |
| "loss": 1.9922, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 2.5503355704697992e-06, | |
| "loss": 1.9883, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "learning_rate": 2.5234899328859062e-06, | |
| "loss": 2.0176, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "learning_rate": 2.4966442953020136e-06, | |
| "loss": 2.0039, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "learning_rate": 2.469798657718121e-06, | |
| "loss": 1.9961, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "learning_rate": 2.4429530201342285e-06, | |
| "loss": 2.002, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 2.416107382550336e-06, | |
| "loss": 2.0205, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 2.3892617449664433e-06, | |
| "loss": 1.9971, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "learning_rate": 2.3624161073825507e-06, | |
| "loss": 2.0088, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "learning_rate": 2.3355704697986577e-06, | |
| "loss": 2.0254, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "learning_rate": 2.3087248322147655e-06, | |
| "loss": 2.0117, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "learning_rate": 2.2818791946308725e-06, | |
| "loss": 1.9961, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "learning_rate": 2.25503355704698e-06, | |
| "loss": 2.0059, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "learning_rate": 2.2281879194630873e-06, | |
| "loss": 2.0029, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "learning_rate": 2.2013422818791947e-06, | |
| "loss": 2.0098, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "learning_rate": 2.174496644295302e-06, | |
| "loss": 2.0254, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "learning_rate": 2.1476510067114096e-06, | |
| "loss": 2.0107, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 2.120805369127517e-06, | |
| "loss": 2.0059, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "learning_rate": 2.0939597315436244e-06, | |
| "loss": 2.0107, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "learning_rate": 2.067114093959732e-06, | |
| "loss": 1.9922, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "learning_rate": 2.0402684563758392e-06, | |
| "loss": 2.0137, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "learning_rate": 2.013422818791946e-06, | |
| "loss": 1.9912, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "learning_rate": 1.986577181208054e-06, | |
| "loss": 2.0146, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "learning_rate": 1.959731543624161e-06, | |
| "loss": 2.0039, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "learning_rate": 1.9328859060402684e-06, | |
| "loss": 1.9961, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "learning_rate": 1.906040268456376e-06, | |
| "loss": 2.0254, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 1.8791946308724835e-06, | |
| "loss": 2.0156, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "learning_rate": 1.8523489932885907e-06, | |
| "loss": 2.001, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "learning_rate": 1.825503355704698e-06, | |
| "loss": 2.0029, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "learning_rate": 1.7986577181208057e-06, | |
| "loss": 2.0068, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "learning_rate": 1.771812080536913e-06, | |
| "loss": 2.0234, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "learning_rate": 1.7449664429530203e-06, | |
| "loss": 1.9824, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "learning_rate": 1.7181208053691275e-06, | |
| "loss": 2.0049, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 1.6912751677852351e-06, | |
| "loss": 2.0029, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 1.6644295302013426e-06, | |
| "loss": 2.0205, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "learning_rate": 1.6375838926174498e-06, | |
| "loss": 2.0029, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "learning_rate": 1.6107382550335572e-06, | |
| "loss": 2.0166, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "learning_rate": 1.5838926174496646e-06, | |
| "loss": 2.0059, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "learning_rate": 1.557046979865772e-06, | |
| "loss": 2.0186, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "learning_rate": 1.5302013422818792e-06, | |
| "loss": 2.0088, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "learning_rate": 1.5033557046979866e-06, | |
| "loss": 1.9961, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "learning_rate": 1.4765100671140942e-06, | |
| "loss": 2.0107, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "learning_rate": 1.4496644295302014e-06, | |
| "loss": 2.0234, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "learning_rate": 1.4228187919463088e-06, | |
| "loss": 2.0098, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "learning_rate": 1.395973154362416e-06, | |
| "loss": 2.0234, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 1.3691275167785237e-06, | |
| "loss": 2.0254, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 1.342281879194631e-06, | |
| "loss": 2.0029, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "learning_rate": 1.3154362416107383e-06, | |
| "loss": 2.0146, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 1.288590604026846e-06, | |
| "loss": 2.0, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 1.2617449664429531e-06, | |
| "loss": 2.0098, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "learning_rate": 1.2348993288590605e-06, | |
| "loss": 2.0293, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 1.208053691275168e-06, | |
| "loss": 2.0059, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 1.1812080536912753e-06, | |
| "loss": 1.9971, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "learning_rate": 1.1543624161073828e-06, | |
| "loss": 2.0303, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 1.12751677852349e-06, | |
| "loss": 2.0088, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 1.1006711409395974e-06, | |
| "loss": 2.0068, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "learning_rate": 1.0738255033557048e-06, | |
| "loss": 1.9951, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 1.0469798657718122e-06, | |
| "loss": 2.0264, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 1.0201342281879196e-06, | |
| "loss": 1.9951, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "learning_rate": 9.93288590604027e-07, | |
| "loss": 1.999, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "learning_rate": 9.664429530201342e-07, | |
| "loss": 2.0, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 9.395973154362417e-07, | |
| "loss": 2.002, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 9.12751677852349e-07, | |
| "loss": 2.0156, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 4.78, | |
| "learning_rate": 8.859060402684565e-07, | |
| "loss": 2.0117, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 8.590604026845638e-07, | |
| "loss": 1.9902, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 8.322147651006713e-07, | |
| "loss": 1.9932, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "learning_rate": 8.053691275167786e-07, | |
| "loss": 2.001, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 7.78523489932886e-07, | |
| "loss": 2.0059, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 7.516778523489933e-07, | |
| "loss": 2.0078, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "learning_rate": 7.248322147651007e-07, | |
| "loss": 2.0049, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 6.97986577181208e-07, | |
| "loss": 2.002, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 6.711409395973155e-07, | |
| "loss": 2.0117, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 4.84, | |
| "learning_rate": 6.44295302013423e-07, | |
| "loss": 1.9932, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 6.174496644295303e-07, | |
| "loss": 1.998, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 5.906040268456377e-07, | |
| "loss": 2.0146, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 4.86, | |
| "learning_rate": 5.63758389261745e-07, | |
| "loss": 2.0049, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "learning_rate": 5.369127516778524e-07, | |
| "loss": 1.999, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "learning_rate": 5.100671140939598e-07, | |
| "loss": 2.0176, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "learning_rate": 4.832214765100671e-07, | |
| "loss": 2.0098, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "learning_rate": 4.563758389261745e-07, | |
| "loss": 2.0068, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "learning_rate": 4.295302013422819e-07, | |
| "loss": 2.0303, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 4.9, | |
| "learning_rate": 4.026845637583893e-07, | |
| "loss": 2.0137, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 3.7583892617449665e-07, | |
| "loss": 2.0098, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 3.48993288590604e-07, | |
| "loss": 2.0244, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "learning_rate": 3.221476510067115e-07, | |
| "loss": 1.9922, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 4.93, | |
| "learning_rate": 2.9530201342281884e-07, | |
| "loss": 1.998, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 4.93, | |
| "learning_rate": 2.684563758389262e-07, | |
| "loss": 1.9961, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "learning_rate": 2.4161073825503355e-07, | |
| "loss": 1.9951, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 2.1476510067114094e-07, | |
| "loss": 2.0117, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 1.8791946308724833e-07, | |
| "loss": 2.0049, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "learning_rate": 1.6107382550335574e-07, | |
| "loss": 2.0146, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 1.342281879194631e-07, | |
| "loss": 2.0303, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 1.0738255033557047e-07, | |
| "loss": 2.0078, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 4.98, | |
| "learning_rate": 8.053691275167787e-08, | |
| "loss": 2.0039, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "learning_rate": 5.3691275167785235e-08, | |
| "loss": 1.998, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "learning_rate": 2.6845637583892618e-08, | |
| "loss": 2.0186, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "learning_rate": 0.0, | |
| "loss": 1.9941, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "step": 745, | |
| "total_flos": 1.4940678387662848e+17, | |
| "train_loss": 2.1389746749161076, | |
| "train_runtime": 581.9582, | |
| "train_samples_per_second": 655.013, | |
| "train_steps_per_second": 1.28 | |
| } | |
| ], | |
| "max_steps": 745, | |
| "num_train_epochs": 5, | |
| "total_flos": 1.4940678387662848e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
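
The file above follows the structure the Hugging Face Trainer writes to `trainer_state.json`: a `log_history` array of per-step records (`epoch`, `learning_rate`, `loss`, `step`) followed by a final summary record (`train_loss`, `train_runtime`, etc.) and run-level fields such as `max_steps`. The following is a minimal sketch, not part of the original log, assuming the JSON is saved under the hypothetical filename `trainer_state.json`; it separates the per-step records from the summary and checks the linear learning-rate decay visible in the entries (each step subtracts roughly `initial_lr / max_steps`).

```python
import json

# Load the trainer state (filename is an assumption; adjust as needed).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step entries; the final record is a run summary
# ("train_loss"/"train_runtime") and has no per-step "loss" key.
history = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]
lrs = [e["learning_rate"] for e in history]

print(f"steps logged: {len(history)} (max_steps={state['max_steps']})")
print(f"loss: {losses[0]:.4f} -> {losses[-1]:.4f}")

# The schedule decays linearly to 0 at the final step, so consecutive
# entries should differ by a constant decrement of initial_lr / max_steps.
decrement = lrs[0] - lrs[1]
print(f"per-step LR decrement: {decrement:.3e}")
```

On this log the sketch would report 745 logged steps, a loss falling from about 3.20 to 1.99, and a per-step decrement of about 2.685e-08, matching the 2.6845637583892618e-08 learning rate recorded at the penultimate step.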