| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.9998451612903225, | |
| "eval_steps": 500, | |
| "global_step": 4843, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.002064516129032258, | |
| "grad_norm": 3.8347715644338702, | |
| "learning_rate": 2.0618556701030925e-08, | |
| "loss": 0.4845, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.004129032258064516, | |
| "grad_norm": 3.622781800764145, | |
| "learning_rate": 4.123711340206185e-08, | |
| "loss": 0.4768, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.006193548387096774, | |
| "grad_norm": 3.2353201981730644, | |
| "learning_rate": 6.185567010309278e-08, | |
| "loss": 0.4652, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.008258064516129033, | |
| "grad_norm": 2.947330357126175, | |
| "learning_rate": 8.24742268041237e-08, | |
| "loss": 0.4542, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.01032258064516129, | |
| "grad_norm": 2.417908848980943, | |
| "learning_rate": 1.0309278350515462e-07, | |
| "loss": 0.4363, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.012387096774193548, | |
| "grad_norm": 1.74970179232699, | |
| "learning_rate": 1.2371134020618556e-07, | |
| "loss": 0.4185, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.014451612903225806, | |
| "grad_norm": 1.4949679798399427, | |
| "learning_rate": 1.4432989690721648e-07, | |
| "loss": 0.4099, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.016516129032258065, | |
| "grad_norm": 1.2117967081127008, | |
| "learning_rate": 1.649484536082474e-07, | |
| "loss": 0.3901, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.01858064516129032, | |
| "grad_norm": 1.0236940060259172, | |
| "learning_rate": 1.8556701030927835e-07, | |
| "loss": 0.3781, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.02064516129032258, | |
| "grad_norm": 0.8673034546104332, | |
| "learning_rate": 2.0618556701030925e-07, | |
| "loss": 0.3732, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.02270967741935484, | |
| "grad_norm": 0.8133872914570585, | |
| "learning_rate": 2.268041237113402e-07, | |
| "loss": 0.3728, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.024774193548387096, | |
| "grad_norm": 0.8191926249288893, | |
| "learning_rate": 2.474226804123711e-07, | |
| "loss": 0.3613, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.026838709677419356, | |
| "grad_norm": 0.7422414846989145, | |
| "learning_rate": 2.6804123711340204e-07, | |
| "loss": 0.359, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.02890322580645161, | |
| "grad_norm": 0.7201726657809826, | |
| "learning_rate": 2.8865979381443296e-07, | |
| "loss": 0.3598, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.03096774193548387, | |
| "grad_norm": 0.77452988304961, | |
| "learning_rate": 3.0927835051546394e-07, | |
| "loss": 0.3557, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.03303225806451613, | |
| "grad_norm": 0.7191351812558439, | |
| "learning_rate": 3.298969072164948e-07, | |
| "loss": 0.3508, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.03509677419354839, | |
| "grad_norm": 0.6938460773620077, | |
| "learning_rate": 3.5051546391752573e-07, | |
| "loss": 0.3512, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.03716129032258064, | |
| "grad_norm": 0.6640697172414236, | |
| "learning_rate": 3.711340206185567e-07, | |
| "loss": 0.3498, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.0392258064516129, | |
| "grad_norm": 0.6978131389871531, | |
| "learning_rate": 3.917525773195876e-07, | |
| "loss": 0.3372, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.04129032258064516, | |
| "grad_norm": 0.6748840406060271, | |
| "learning_rate": 4.123711340206185e-07, | |
| "loss": 0.3473, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.04335483870967742, | |
| "grad_norm": 0.6903300485581471, | |
| "learning_rate": 4.3298969072164947e-07, | |
| "loss": 0.3436, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.04541935483870968, | |
| "grad_norm": 0.6900391616850294, | |
| "learning_rate": 4.536082474226804e-07, | |
| "loss": 0.3458, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.04748387096774193, | |
| "grad_norm": 0.7042637181386346, | |
| "learning_rate": 4.742268041237113e-07, | |
| "loss": 0.3377, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.04954838709677419, | |
| "grad_norm": 0.659813926490704, | |
| "learning_rate": 4.948453608247422e-07, | |
| "loss": 0.3387, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.05161290322580645, | |
| "grad_norm": 0.6515723207892065, | |
| "learning_rate": 5.154639175257731e-07, | |
| "loss": 0.3443, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.05367741935483871, | |
| "grad_norm": 0.6545766074333088, | |
| "learning_rate": 5.360824742268041e-07, | |
| "loss": 0.342, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.05574193548387097, | |
| "grad_norm": 0.6637287687009573, | |
| "learning_rate": 5.56701030927835e-07, | |
| "loss": 0.3343, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.05780645161290322, | |
| "grad_norm": 0.6483801784518098, | |
| "learning_rate": 5.773195876288659e-07, | |
| "loss": 0.3315, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.05987096774193548, | |
| "grad_norm": 0.6709786596445975, | |
| "learning_rate": 5.979381443298969e-07, | |
| "loss": 0.3449, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.06193548387096774, | |
| "grad_norm": 0.6388273578010589, | |
| "learning_rate": 6.185567010309279e-07, | |
| "loss": 0.3331, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.064, | |
| "grad_norm": 0.6215237839848085, | |
| "learning_rate": 6.391752577319586e-07, | |
| "loss": 0.3364, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.06606451612903226, | |
| "grad_norm": 0.6960918549923397, | |
| "learning_rate": 6.597938144329896e-07, | |
| "loss": 0.3434, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.06812903225806452, | |
| "grad_norm": 0.6810690836972719, | |
| "learning_rate": 6.804123711340206e-07, | |
| "loss": 0.3368, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.07019354838709678, | |
| "grad_norm": 0.6358648816123277, | |
| "learning_rate": 7.010309278350515e-07, | |
| "loss": 0.3338, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.07225806451612904, | |
| "grad_norm": 0.668088207474176, | |
| "learning_rate": 7.216494845360824e-07, | |
| "loss": 0.3414, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.07432258064516128, | |
| "grad_norm": 0.6478792898005766, | |
| "learning_rate": 7.422680412371134e-07, | |
| "loss": 0.3364, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.07638709677419354, | |
| "grad_norm": 0.6795998111394932, | |
| "learning_rate": 7.628865979381443e-07, | |
| "loss": 0.327, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.0784516129032258, | |
| "grad_norm": 0.6525046715909505, | |
| "learning_rate": 7.835051546391752e-07, | |
| "loss": 0.3348, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.08051612903225806, | |
| "grad_norm": 0.685633155774819, | |
| "learning_rate": 8.041237113402062e-07, | |
| "loss": 0.3314, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.08258064516129032, | |
| "grad_norm": 0.6308341522120817, | |
| "learning_rate": 8.24742268041237e-07, | |
| "loss": 0.3346, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.08464516129032258, | |
| "grad_norm": 0.6777484559645547, | |
| "learning_rate": 8.45360824742268e-07, | |
| "loss": 0.332, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.08670967741935484, | |
| "grad_norm": 0.6664332905967745, | |
| "learning_rate": 8.659793814432989e-07, | |
| "loss": 0.3276, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.0887741935483871, | |
| "grad_norm": 0.678260164078579, | |
| "learning_rate": 8.865979381443298e-07, | |
| "loss": 0.3347, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.09083870967741936, | |
| "grad_norm": 0.6639398379059098, | |
| "learning_rate": 9.072164948453608e-07, | |
| "loss": 0.3349, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.09290322580645162, | |
| "grad_norm": 0.6544729326125452, | |
| "learning_rate": 9.278350515463918e-07, | |
| "loss": 0.3351, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.09496774193548387, | |
| "grad_norm": 0.6736956960554942, | |
| "learning_rate": 9.484536082474226e-07, | |
| "loss": 0.3364, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.09703225806451612, | |
| "grad_norm": 0.6798760919991121, | |
| "learning_rate": 9.690721649484535e-07, | |
| "loss": 0.3329, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.09909677419354838, | |
| "grad_norm": 0.6860410772296518, | |
| "learning_rate": 9.896907216494845e-07, | |
| "loss": 0.3267, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.10116129032258064, | |
| "grad_norm": 0.6724242514524695, | |
| "learning_rate": 9.999967520836107e-07, | |
| "loss": 0.3301, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.1032258064516129, | |
| "grad_norm": 0.7146272907830368, | |
| "learning_rate": 9.999707690056706e-07, | |
| "loss": 0.3405, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.10529032258064516, | |
| "grad_norm": 0.6602284351487545, | |
| "learning_rate": 9.999188042000401e-07, | |
| "loss": 0.3331, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.10735483870967742, | |
| "grad_norm": 0.6500269744684952, | |
| "learning_rate": 9.998408603671476e-07, | |
| "loss": 0.3349, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.10941935483870968, | |
| "grad_norm": 0.6659080270743315, | |
| "learning_rate": 9.997369415574612e-07, | |
| "loss": 0.3255, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.11148387096774194, | |
| "grad_norm": 0.6616751264530504, | |
| "learning_rate": 9.996070531712766e-07, | |
| "loss": 0.3277, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.1135483870967742, | |
| "grad_norm": 0.6964651744599499, | |
| "learning_rate": 9.99451201958438e-07, | |
| "loss": 0.3317, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.11561290322580645, | |
| "grad_norm": 0.6322033648536264, | |
| "learning_rate": 9.992693960179864e-07, | |
| "loss": 0.3274, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.1176774193548387, | |
| "grad_norm": 0.6651477736231276, | |
| "learning_rate": 9.99061644797739e-07, | |
| "loss": 0.3362, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.11974193548387096, | |
| "grad_norm": 0.6508795261048231, | |
| "learning_rate": 9.988279590937983e-07, | |
| "loss": 0.3304, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.12180645161290322, | |
| "grad_norm": 0.6364459903886801, | |
| "learning_rate": 9.985683510499907e-07, | |
| "loss": 0.3242, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.12387096774193548, | |
| "grad_norm": 0.6619954180634886, | |
| "learning_rate": 9.982828341572362e-07, | |
| "loss": 0.329, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.12593548387096773, | |
| "grad_norm": 0.6413848598383421, | |
| "learning_rate": 9.979714232528463e-07, | |
| "loss": 0.3229, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.128, | |
| "grad_norm": 0.6424245333342599, | |
| "learning_rate": 9.97634134519754e-07, | |
| "loss": 0.3311, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.13006451612903225, | |
| "grad_norm": 0.6344772952123243, | |
| "learning_rate": 9.972709854856716e-07, | |
| "loss": 0.3306, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.13212903225806452, | |
| "grad_norm": 0.6790645384085968, | |
| "learning_rate": 9.968819950221812e-07, | |
| "loss": 0.3218, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.13419354838709677, | |
| "grad_norm": 0.6360664660727335, | |
| "learning_rate": 9.964671833437533e-07, | |
| "loss": 0.3296, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.13625806451612904, | |
| "grad_norm": 0.6708293122866612, | |
| "learning_rate": 9.960265720066961e-07, | |
| "loss": 0.3194, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.13832258064516129, | |
| "grad_norm": 0.682103425250404, | |
| "learning_rate": 9.95560183908036e-07, | |
| "loss": 0.3216, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.14038709677419356, | |
| "grad_norm": 0.649381758646969, | |
| "learning_rate": 9.950680432843267e-07, | |
| "loss": 0.326, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.1424516129032258, | |
| "grad_norm": 0.6584415316713365, | |
| "learning_rate": 9.94550175710391e-07, | |
| "loss": 0.3268, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.14451612903225808, | |
| "grad_norm": 0.6299571550013747, | |
| "learning_rate": 9.940066080979909e-07, | |
| "loss": 0.3233, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.14658064516129032, | |
| "grad_norm": 0.6538409379488348, | |
| "learning_rate": 9.934373686944286e-07, | |
| "loss": 0.3259, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.14864516129032257, | |
| "grad_norm": 0.6261303918046864, | |
| "learning_rate": 9.928424870810804e-07, | |
| "loss": 0.3227, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.15070967741935484, | |
| "grad_norm": 0.6504421655013074, | |
| "learning_rate": 9.922219941718576e-07, | |
| "loss": 0.3245, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.1527741935483871, | |
| "grad_norm": 0.6338912710677398, | |
| "learning_rate": 9.915759222116008e-07, | |
| "loss": 0.3272, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.15483870967741936, | |
| "grad_norm": 0.6483630749442493, | |
| "learning_rate": 9.909043047744044e-07, | |
| "loss": 0.3322, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.1569032258064516, | |
| "grad_norm": 0.6427038784083127, | |
| "learning_rate": 9.902071767618715e-07, | |
| "loss": 0.3259, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.15896774193548388, | |
| "grad_norm": 0.6384424550604813, | |
| "learning_rate": 9.894845744013002e-07, | |
| "loss": 0.3299, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.16103225806451613, | |
| "grad_norm": 0.6436281486654595, | |
| "learning_rate": 9.887365352438015e-07, | |
| "loss": 0.3202, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.1630967741935484, | |
| "grad_norm": 0.6600853885272884, | |
| "learning_rate": 9.879630981623475e-07, | |
| "loss": 0.3316, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.16516129032258065, | |
| "grad_norm": 0.7273173646706375, | |
| "learning_rate": 9.871643033497512e-07, | |
| "loss": 0.3268, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.1672258064516129, | |
| "grad_norm": 0.6475774878384731, | |
| "learning_rate": 9.863401923165778e-07, | |
| "loss": 0.3279, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.16929032258064516, | |
| "grad_norm": 0.6616526425163223, | |
| "learning_rate": 9.854908078889881e-07, | |
| "loss": 0.3241, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.1713548387096774, | |
| "grad_norm": 0.6582121362859629, | |
| "learning_rate": 9.846161942065123e-07, | |
| "loss": 0.3256, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.17341935483870968, | |
| "grad_norm": 0.6359674660372312, | |
| "learning_rate": 9.837163967197567e-07, | |
| "loss": 0.3254, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.17548387096774193, | |
| "grad_norm": 0.6686054614543271, | |
| "learning_rate": 9.827914621880412e-07, | |
| "loss": 0.3271, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.1775483870967742, | |
| "grad_norm": 0.64804057560635, | |
| "learning_rate": 9.818414386769702e-07, | |
| "loss": 0.3286, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.17961290322580645, | |
| "grad_norm": 0.6412450081891817, | |
| "learning_rate": 9.808663755559345e-07, | |
| "loss": 0.3235, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.18167741935483872, | |
| "grad_norm": 0.6233022789634588, | |
| "learning_rate": 9.798663234955452e-07, | |
| "loss": 0.325, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.18374193548387097, | |
| "grad_norm": 0.6103780146802401, | |
| "learning_rate": 9.788413344650013e-07, | |
| "loss": 0.3229, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.18580645161290324, | |
| "grad_norm": 0.6990894346417448, | |
| "learning_rate": 9.777914617293884e-07, | |
| "loss": 0.3167, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.18787096774193549, | |
| "grad_norm": 0.668098707213347, | |
| "learning_rate": 9.767167598469108e-07, | |
| "loss": 0.3242, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.18993548387096773, | |
| "grad_norm": 0.6305920111862352, | |
| "learning_rate": 9.756172846660576e-07, | |
| "loss": 0.326, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.192, | |
| "grad_norm": 0.6201553079735226, | |
| "learning_rate": 9.744930933226977e-07, | |
| "loss": 0.3292, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.19406451612903225, | |
| "grad_norm": 0.6555873449992747, | |
| "learning_rate": 9.73344244237114e-07, | |
| "loss": 0.3252, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.19612903225806452, | |
| "grad_norm": 0.6173747729705402, | |
| "learning_rate": 9.721707971109645e-07, | |
| "loss": 0.3221, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.19819354838709677, | |
| "grad_norm": 0.6409642693454491, | |
| "learning_rate": 9.709728129241817e-07, | |
| "loss": 0.3183, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.20025806451612904, | |
| "grad_norm": 0.692958036040751, | |
| "learning_rate": 9.69750353931803e-07, | |
| "loss": 0.321, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.2023225806451613, | |
| "grad_norm": 0.6540304743009737, | |
| "learning_rate": 9.685034836607358e-07, | |
| "loss": 0.3306, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.20438709677419356, | |
| "grad_norm": 0.606153875842293, | |
| "learning_rate": 9.672322669064552e-07, | |
| "loss": 0.3267, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.2064516129032258, | |
| "grad_norm": 0.6355082616879962, | |
| "learning_rate": 9.659367697296393e-07, | |
| "loss": 0.3246, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.20851612903225805, | |
| "grad_norm": 0.6540770595021805, | |
| "learning_rate": 9.646170594527327e-07, | |
| "loss": 0.3207, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.21058064516129033, | |
| "grad_norm": 0.6427115148761209, | |
| "learning_rate": 9.632732046564517e-07, | |
| "loss": 0.3341, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.21264516129032257, | |
| "grad_norm": 0.6583782369634305, | |
| "learning_rate": 9.619052751762172e-07, | |
| "loss": 0.3258, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.21470967741935484, | |
| "grad_norm": 0.6465456812217837, | |
| "learning_rate": 9.605133420985276e-07, | |
| "loss": 0.3197, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.2167741935483871, | |
| "grad_norm": 0.6392843901176241, | |
| "learning_rate": 9.590974777572643e-07, | |
| "loss": 0.3196, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.21883870967741936, | |
| "grad_norm": 0.6928785215501877, | |
| "learning_rate": 9.576577557299323e-07, | |
| "loss": 0.3238, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.2209032258064516, | |
| "grad_norm": 0.6375416863344153, | |
| "learning_rate": 9.561942508338368e-07, | |
| "loss": 0.328, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.22296774193548388, | |
| "grad_norm": 0.630092402055495, | |
| "learning_rate": 9.547070391221953e-07, | |
| "loss": 0.321, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.22503225806451613, | |
| "grad_norm": 0.6429556555744979, | |
| "learning_rate": 9.531961978801855e-07, | |
| "loss": 0.3289, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.2270967741935484, | |
| "grad_norm": 0.6477437711690731, | |
| "learning_rate": 9.516618056209291e-07, | |
| "loss": 0.3174, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.22916129032258065, | |
| "grad_norm": 0.6374229581293065, | |
| "learning_rate": 9.501039420814111e-07, | |
| "loss": 0.3236, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.2312258064516129, | |
| "grad_norm": 0.6333096872853811, | |
| "learning_rate": 9.485226882183372e-07, | |
| "loss": 0.3272, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.23329032258064517, | |
| "grad_norm": 0.6590626784400708, | |
| "learning_rate": 9.469181262039256e-07, | |
| "loss": 0.3283, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.2353548387096774, | |
| "grad_norm": 0.6597258358538342, | |
| "learning_rate": 9.45290339421638e-07, | |
| "loss": 0.3178, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.23741935483870968, | |
| "grad_norm": 0.6726623465784939, | |
| "learning_rate": 9.436394124618454e-07, | |
| "loss": 0.3322, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.23948387096774193, | |
| "grad_norm": 0.6601259805802864, | |
| "learning_rate": 9.419654311174329e-07, | |
| "loss": 0.3191, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.2415483870967742, | |
| "grad_norm": 0.6466405190915544, | |
| "learning_rate": 9.40268482379341e-07, | |
| "loss": 0.3283, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.24361290322580645, | |
| "grad_norm": 0.6411852843547549, | |
| "learning_rate": 9.385486544320451e-07, | |
| "loss": 0.3227, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.24567741935483872, | |
| "grad_norm": 0.6402118423685845, | |
| "learning_rate": 9.368060366489732e-07, | |
| "loss": 0.3178, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.24774193548387097, | |
| "grad_norm": 0.6524107490419644, | |
| "learning_rate": 9.350407195878607e-07, | |
| "loss": 0.3242, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.2498064516129032, | |
| "grad_norm": 0.6513244138766866, | |
| "learning_rate": 9.332527949860451e-07, | |
| "loss": 0.3281, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.25187096774193546, | |
| "grad_norm": 0.669880198956457, | |
| "learning_rate": 9.314423557556986e-07, | |
| "loss": 0.3248, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.25393548387096776, | |
| "grad_norm": 0.6356540042907762, | |
| "learning_rate": 9.296094959789994e-07, | |
| "loss": 0.3166, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.256, | |
| "grad_norm": 0.666974189133567, | |
| "learning_rate": 9.277543109032433e-07, | |
| "loss": 0.3224, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.25806451612903225, | |
| "grad_norm": 0.6513050299994531, | |
| "learning_rate": 9.258768969358933e-07, | |
| "loss": 0.3232, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.2601290322580645, | |
| "grad_norm": 0.6481734665370465, | |
| "learning_rate": 9.2397735163957e-07, | |
| "loss": 0.3175, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.2621935483870968, | |
| "grad_norm": 0.643020157091917, | |
| "learning_rate": 9.220557737269816e-07, | |
| "loss": 0.3218, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.26425806451612904, | |
| "grad_norm": 0.6426811969692795, | |
| "learning_rate": 9.201122630557943e-07, | |
| "loss": 0.3275, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.2663225806451613, | |
| "grad_norm": 0.6947621385955748, | |
| "learning_rate": 9.181469206234422e-07, | |
| "loss": 0.3285, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.26838709677419353, | |
| "grad_norm": 0.6312730128131891, | |
| "learning_rate": 9.161598485618803e-07, | |
| "loss": 0.3236, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.2704516129032258, | |
| "grad_norm": 0.6250577395835617, | |
| "learning_rate": 9.141511501322758e-07, | |
| "loss": 0.319, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.2725161290322581, | |
| "grad_norm": 0.6761809621959772, | |
| "learning_rate": 9.121209297196424e-07, | |
| "loss": 0.324, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.2745806451612903, | |
| "grad_norm": 0.6559064362731541, | |
| "learning_rate": 9.100692928274158e-07, | |
| "loss": 0.3235, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.27664516129032257, | |
| "grad_norm": 0.6470753912100222, | |
| "learning_rate": 9.079963460719714e-07, | |
| "loss": 0.3136, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.2787096774193548, | |
| "grad_norm": 0.67716550873512, | |
| "learning_rate": 9.059021971770828e-07, | |
| "loss": 0.323, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.2807741935483871, | |
| "grad_norm": 0.6526933363094527, | |
| "learning_rate": 9.037869549683252e-07, | |
| "loss": 0.3158, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.28283870967741936, | |
| "grad_norm": 0.616298302365804, | |
| "learning_rate": 9.016507293674187e-07, | |
| "loss": 0.3248, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.2849032258064516, | |
| "grad_norm": 0.6404102199659266, | |
| "learning_rate": 8.994936313865171e-07, | |
| "loss": 0.3271, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.28696774193548386, | |
| "grad_norm": 0.6412752672717336, | |
| "learning_rate": 8.973157731224385e-07, | |
| "loss": 0.3299, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.28903225806451616, | |
| "grad_norm": 0.6297868348665913, | |
| "learning_rate": 8.9511726775084e-07, | |
| "loss": 0.3225, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.2910967741935484, | |
| "grad_norm": 0.6325507536718176, | |
| "learning_rate": 8.928982295203367e-07, | |
| "loss": 0.319, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.29316129032258065, | |
| "grad_norm": 0.6472477044345786, | |
| "learning_rate": 8.906587737465642e-07, | |
| "loss": 0.3228, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.2952258064516129, | |
| "grad_norm": 0.6783964173187546, | |
| "learning_rate": 8.883990168061863e-07, | |
| "loss": 0.3211, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.29729032258064514, | |
| "grad_norm": 0.624197836950835, | |
| "learning_rate": 8.861190761308472e-07, | |
| "loss": 0.3211, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.29935483870967744, | |
| "grad_norm": 0.6717372599198446, | |
| "learning_rate": 8.838190702010693e-07, | |
| "loss": 0.3231, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.3014193548387097, | |
| "grad_norm": 0.6769990705574879, | |
| "learning_rate": 8.814991185400951e-07, | |
| "loss": 0.3208, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.30348387096774193, | |
| "grad_norm": 0.6659941754213017, | |
| "learning_rate": 8.791593417076781e-07, | |
| "loss": 0.3164, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.3055483870967742, | |
| "grad_norm": 0.6414833851246282, | |
| "learning_rate": 8.767998612938152e-07, | |
| "loss": 0.3206, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.3076129032258065, | |
| "grad_norm": 0.6514676034582378, | |
| "learning_rate": 8.744207999124301e-07, | |
| "loss": 0.3263, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.3096774193548387, | |
| "grad_norm": 0.6489872854216381, | |
| "learning_rate": 8.720222811950004e-07, | |
| "loss": 0.3154, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.31174193548387097, | |
| "grad_norm": 0.623717284587322, | |
| "learning_rate": 8.696044297841334e-07, | |
| "loss": 0.3176, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.3138064516129032, | |
| "grad_norm": 0.6504605677148272, | |
| "learning_rate": 8.671673713270886e-07, | |
| "loss": 0.3157, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.31587096774193546, | |
| "grad_norm": 0.6566805248169392, | |
| "learning_rate": 8.647112324692481e-07, | |
| "loss": 0.3188, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.31793548387096776, | |
| "grad_norm": 0.646097457626664, | |
| "learning_rate": 8.622361408475361e-07, | |
| "loss": 0.3203, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.6545852284748576, | |
| "learning_rate": 8.597422250837848e-07, | |
| "loss": 0.3239, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.32206451612903225, | |
| "grad_norm": 0.6572912261816161, | |
| "learning_rate": 8.572296147780515e-07, | |
| "loss": 0.3161, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.3241290322580645, | |
| "grad_norm": 0.641031543823275, | |
| "learning_rate": 8.546984405018834e-07, | |
| "loss": 0.327, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.3261935483870968, | |
| "grad_norm": 0.646834473464689, | |
| "learning_rate": 8.521488337915318e-07, | |
| "loss": 0.328, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.32825806451612904, | |
| "grad_norm": 0.7030362083327168, | |
| "learning_rate": 8.49580927141117e-07, | |
| "loss": 0.3218, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.3303225806451613, | |
| "grad_norm": 0.6315828365581327, | |
| "learning_rate": 8.469948539957432e-07, | |
| "loss": 0.3249, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.33238709677419354, | |
| "grad_norm": 0.6718128979339714, | |
| "learning_rate": 8.443907487445634e-07, | |
| "loss": 0.316, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.3344516129032258, | |
| "grad_norm": 0.6429184826992661, | |
| "learning_rate": 8.417687467137964e-07, | |
| "loss": 0.3218, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.3365161290322581, | |
| "grad_norm": 0.6374025154785593, | |
| "learning_rate": 8.391289841596934e-07, | |
| "loss": 0.3158, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.33858064516129033, | |
| "grad_norm": 0.6412004629975684, | |
| "learning_rate": 8.364715982614577e-07, | |
| "loss": 0.3227, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.3406451612903226, | |
| "grad_norm": 0.6421068921819796, | |
| "learning_rate": 8.337967271141164e-07, | |
| "loss": 0.3217, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.3427096774193548, | |
| "grad_norm": 0.6387875404347384, | |
| "learning_rate": 8.311045097213431e-07, | |
| "loss": 0.3294, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.3447741935483871, | |
| "grad_norm": 0.6400040799598268, | |
| "learning_rate": 8.283950859882358e-07, | |
| "loss": 0.3289, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.34683870967741937, | |
| "grad_norm": 0.6544339151363505, | |
| "learning_rate": 8.256685967140449e-07, | |
| "loss": 0.3189, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.3489032258064516, | |
| "grad_norm": 0.6474316864675644, | |
| "learning_rate": 8.229251835848573e-07, | |
| "loss": 0.3198, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.35096774193548386, | |
| "grad_norm": 0.6331712614233053, | |
| "learning_rate": 8.201649891662334e-07, | |
| "loss": 0.3232, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.3530322580645161, | |
| "grad_norm": 0.6798002483244694, | |
| "learning_rate": 8.173881568957986e-07, | |
| "loss": 0.3204, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.3550967741935484, | |
| "grad_norm": 0.6397649167953239, | |
| "learning_rate": 8.145948310757886e-07, | |
| "loss": 0.3188, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.35716129032258065, | |
| "grad_norm": 0.6158268086730608, | |
| "learning_rate": 8.117851568655517e-07, | |
| "loss": 0.3207, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.3592258064516129, | |
| "grad_norm": 0.6344149660303308, | |
| "learning_rate": 8.089592802740039e-07, | |
| "loss": 0.3213, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.36129032258064514, | |
| "grad_norm": 0.6596708905977811, | |
| "learning_rate": 8.061173481520429e-07, | |
| "loss": 0.3169, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.36335483870967744, | |
| "grad_norm": 0.594571278861055, | |
| "learning_rate": 8.032595081849154e-07, | |
| "loss": 0.315, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.3654193548387097, | |
| "grad_norm": 0.6200795570147979, | |
| "learning_rate": 8.003859088845436e-07, | |
| "loss": 0.3175, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.36748387096774193, | |
| "grad_norm": 0.6357933185771896, | |
| "learning_rate": 7.974966995818066e-07, | |
| "loss": 0.3291, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.3695483870967742, | |
| "grad_norm": 0.6298425334317519, | |
| "learning_rate": 7.945920304187806e-07, | |
| "loss": 0.3202, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.3716129032258065, | |
| "grad_norm": 0.6197664857789437, | |
| "learning_rate": 7.916720523409366e-07, | |
| "loss": 0.3195, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.3736774193548387, | |
| "grad_norm": 0.6638430965232455, | |
| "learning_rate": 7.887369170892964e-07, | |
| "loss": 0.3242, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.37574193548387097, | |
| "grad_norm": 0.623255717934661, | |
| "learning_rate": 7.857867771925468e-07, | |
| "loss": 0.3171, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.3778064516129032, | |
| "grad_norm": 0.6203963007801919, | |
| "learning_rate": 7.828217859591133e-07, | |
| "loss": 0.3204, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.37987096774193546, | |
| "grad_norm": 0.662486034824777, | |
| "learning_rate": 7.798420974691935e-07, | |
| "loss": 0.3213, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.38193548387096776, | |
| "grad_norm": 0.6817838488299565, | |
| "learning_rate": 7.7684786656675e-07, | |
| "loss": 0.3213, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.384, | |
| "grad_norm": 0.616450638065689, | |
| "learning_rate": 7.738392488514627e-07, | |
| "loss": 0.3171, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.38606451612903225, | |
| "grad_norm": 0.643917095059045, | |
| "learning_rate": 7.70816400670645e-07, | |
| "loss": 0.3185, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.3881290322580645, | |
| "grad_norm": 0.6353966219397654, | |
| "learning_rate": 7.677794791111168e-07, | |
| "loss": 0.3177, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.3901935483870968, | |
| "grad_norm": 0.6691601627176933, | |
| "learning_rate": 7.647286419910426e-07, | |
| "loss": 0.3186, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.39225806451612905, | |
| "grad_norm": 0.6324600834717697, | |
| "learning_rate": 7.616640478517293e-07, | |
| "loss": 0.3191, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.3943225806451613, | |
| "grad_norm": 0.662059038474577, | |
| "learning_rate": 7.585858559493885e-07, | |
| "loss": 0.3164, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.39638709677419354, | |
| "grad_norm": 0.6444450125482107, | |
| "learning_rate": 7.554942262468593e-07, | |
| "loss": 0.3224, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.3984516129032258, | |
| "grad_norm": 0.6633456465202244, | |
| "learning_rate": 7.523893194052966e-07, | |
| "loss": 0.3147, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.4005161290322581, | |
| "grad_norm": 0.6585550440311129, | |
| "learning_rate": 7.49271296775821e-07, | |
| "loss": 0.3196, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.40258064516129033, | |
| "grad_norm": 0.628650403491387, | |
| "learning_rate": 7.461403203911355e-07, | |
| "loss": 0.3192, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.4046451612903226, | |
| "grad_norm": 0.6506895506578853, | |
| "learning_rate": 7.429965529571036e-07, | |
| "loss": 0.3141, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.4067096774193548, | |
| "grad_norm": 0.6227648165056641, | |
| "learning_rate": 7.398401578442952e-07, | |
| "loss": 0.3162, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.4087741935483871, | |
| "grad_norm": 0.6273632185846456, | |
| "learning_rate": 7.366712990794961e-07, | |
| "loss": 0.3258, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.41083870967741937, | |
| "grad_norm": 0.67490569636163, | |
| "learning_rate": 7.334901413371847e-07, | |
| "loss": 0.3235, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.4129032258064516, | |
| "grad_norm": 0.6305199896100085, | |
| "learning_rate": 7.302968499309737e-07, | |
| "loss": 0.3178, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.41496774193548386, | |
| "grad_norm": 0.6747577125957268, | |
| "learning_rate": 7.270915908050204e-07, | |
| "loss": 0.3166, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.4170322580645161, | |
| "grad_norm": 0.6369902232569418, | |
| "learning_rate": 7.238745305254019e-07, | |
| "loss": 0.3234, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.4190967741935484, | |
| "grad_norm": 0.6862442619995625, | |
| "learning_rate": 7.206458362714602e-07, | |
| "loss": 0.3082, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.42116129032258065, | |
| "grad_norm": 0.6185800663811354, | |
| "learning_rate": 7.17405675827114e-07, | |
| "loss": 0.3256, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.4232258064516129, | |
| "grad_norm": 0.6421361475096339, | |
| "learning_rate": 7.1415421757214e-07, | |
| "loss": 0.3178, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.42529032258064514, | |
| "grad_norm": 0.6103129194741619, | |
| "learning_rate": 7.10891630473422e-07, | |
| "loss": 0.319, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.42735483870967744, | |
| "grad_norm": 0.6122550249520009, | |
| "learning_rate": 7.076180840761714e-07, | |
| "loss": 0.3162, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.4294193548387097, | |
| "grad_norm": 0.6723489782935563, | |
| "learning_rate": 7.043337484951154e-07, | |
| "loss": 0.3179, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.43148387096774193, | |
| "grad_norm": 0.6553625507026416, | |
| "learning_rate": 7.010387944056576e-07, | |
| "loss": 0.3131, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.4335483870967742, | |
| "grad_norm": 0.6732724028213406, | |
| "learning_rate": 6.97733393035008e-07, | |
| "loss": 0.3166, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.4356129032258064, | |
| "grad_norm": 0.6323321242398868, | |
| "learning_rate": 6.944177161532851e-07, | |
| "loss": 0.3119, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.4376774193548387, | |
| "grad_norm": 0.6408466129224434, | |
| "learning_rate": 6.910919360645902e-07, | |
| "loss": 0.3248, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.43974193548387097, | |
| "grad_norm": 0.6457416165799166, | |
| "learning_rate": 6.877562255980519e-07, | |
| "loss": 0.3228, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.4418064516129032, | |
| "grad_norm": 0.6455332593011724, | |
| "learning_rate": 6.844107580988471e-07, | |
| "loss": 0.3202, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.44387096774193546, | |
| "grad_norm": 0.6353916997435629, | |
| "learning_rate": 6.810557074191899e-07, | |
| "loss": 0.3172, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.44593548387096776, | |
| "grad_norm": 0.6305782916264491, | |
| "learning_rate": 6.776912479093001e-07, | |
| "loss": 0.3141, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.448, | |
| "grad_norm": 0.6455331904620217, | |
| "learning_rate": 6.743175544083403e-07, | |
| "loss": 0.3168, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.45006451612903225, | |
| "grad_norm": 0.6405530821980946, | |
| "learning_rate": 6.709348022353318e-07, | |
| "loss": 0.3177, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.4521290322580645, | |
| "grad_norm": 0.6225103140072267, | |
| "learning_rate": 6.675431671800436e-07, | |
| "loss": 0.319, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.4541935483870968, | |
| "grad_norm": 0.6230005140918707, | |
| "learning_rate": 6.64142825493856e-07, | |
| "loss": 0.3169, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.45625806451612905, | |
| "grad_norm": 0.6364344705253122, | |
| "learning_rate": 6.607339538806034e-07, | |
| "loss": 0.3134, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.4583225806451613, | |
| "grad_norm": 0.6659225896183519, | |
| "learning_rate": 6.573167294873898e-07, | |
| "loss": 0.3072, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.46038709677419354, | |
| "grad_norm": 0.6259096717319926, | |
| "learning_rate": 6.538913298953845e-07, | |
| "loss": 0.318, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.4624516129032258, | |
| "grad_norm": 0.628880285691171, | |
| "learning_rate": 6.504579331105928e-07, | |
| "loss": 0.3197, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.4645161290322581, | |
| "grad_norm": 0.6321856597080695, | |
| "learning_rate": 6.470167175546063e-07, | |
| "loss": 0.3074, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.46658064516129033, | |
| "grad_norm": 0.6110714159723901, | |
| "learning_rate": 6.435678620553306e-07, | |
| "loss": 0.3155, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.4686451612903226, | |
| "grad_norm": 0.6321195923805359, | |
| "learning_rate": 6.401115458376924e-07, | |
| "loss": 0.3085, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.4707096774193548, | |
| "grad_norm": 0.6386785331141132, | |
| "learning_rate": 6.366479485143257e-07, | |
| "loss": 0.3186, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.4727741935483871, | |
| "grad_norm": 0.6043493485728505, | |
| "learning_rate": 6.331772500762382e-07, | |
| "loss": 0.3144, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.47483870967741937, | |
| "grad_norm": 0.6255984844555192, | |
| "learning_rate": 6.296996308834575e-07, | |
| "loss": 0.3159, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.4769032258064516, | |
| "grad_norm": 0.6195083185057502, | |
| "learning_rate": 6.262152716556586e-07, | |
| "loss": 0.317, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.47896774193548386, | |
| "grad_norm": 0.6300622425863397, | |
| "learning_rate": 6.227243534627724e-07, | |
| "loss": 0.3172, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.4810322580645161, | |
| "grad_norm": 0.6595039164876313, | |
| "learning_rate": 6.192270577155764e-07, | |
| "loss": 0.3211, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.4830967741935484, | |
| "grad_norm": 0.6366219906121937, | |
| "learning_rate": 6.157235661562672e-07, | |
| "loss": 0.314, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.48516129032258065, | |
| "grad_norm": 0.6527397855569752, | |
| "learning_rate": 6.122140608490157e-07, | |
| "loss": 0.3159, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.4872258064516129, | |
| "grad_norm": 0.6603354252170923, | |
| "learning_rate": 6.086987241705066e-07, | |
| "loss": 0.315, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.48929032258064514, | |
| "grad_norm": 0.5763376111349164, | |
| "learning_rate": 6.051777388004603e-07, | |
| "loss": 0.3198, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.49135483870967744, | |
| "grad_norm": 0.629217883816345, | |
| "learning_rate": 6.0165128771214e-07, | |
| "loss": 0.3199, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.4934193548387097, | |
| "grad_norm": 0.6946582376730316, | |
| "learning_rate": 5.981195541628431e-07, | |
| "loss": 0.3209, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.49548387096774194, | |
| "grad_norm": 0.6292866101195682, | |
| "learning_rate": 5.945827216843779e-07, | |
| "loss": 0.3171, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.4975483870967742, | |
| "grad_norm": 0.6472509856407982, | |
| "learning_rate": 5.910409740735263e-07, | |
| "loss": 0.3202, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.4996129032258064, | |
| "grad_norm": 0.6397136836791187, | |
| "learning_rate": 5.874944953824919e-07, | |
| "loss": 0.3104, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.5016774193548387, | |
| "grad_norm": 0.6660104783886344, | |
| "learning_rate": 5.83943469909337e-07, | |
| "loss": 0.322, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.5037419354838709, | |
| "grad_norm": 0.6286469573260441, | |
| "learning_rate": 5.803880821884032e-07, | |
| "loss": 0.3099, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.5058064516129033, | |
| "grad_norm": 0.6333782484084417, | |
| "learning_rate": 5.768285169807233e-07, | |
| "loss": 0.3149, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.5078709677419355, | |
| "grad_norm": 0.6229633085471118, | |
| "learning_rate": 5.73264959264419e-07, | |
| "loss": 0.3102, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.5099354838709678, | |
| "grad_norm": 0.6296999697703776, | |
| "learning_rate": 5.696975942250896e-07, | |
| "loss": 0.3137, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.512, | |
| "grad_norm": 0.6068961890441498, | |
| "learning_rate": 5.661266072461866e-07, | |
| "loss": 0.3149, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.5140645161290323, | |
| "grad_norm": 0.6225249709979962, | |
| "learning_rate": 5.625521838993814e-07, | |
| "loss": 0.3162, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.5161290322580645, | |
| "grad_norm": 0.6317655451714331, | |
| "learning_rate": 5.589745099349219e-07, | |
| "loss": 0.3178, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.5181935483870967, | |
| "grad_norm": 0.6636706396751287, | |
| "learning_rate": 5.55393771271978e-07, | |
| "loss": 0.3157, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.520258064516129, | |
| "grad_norm": 0.6680947882529774, | |
| "learning_rate": 5.518101539889828e-07, | |
| "loss": 0.3155, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.5223225806451612, | |
| "grad_norm": 0.6360722820260652, | |
| "learning_rate": 5.482238443139597e-07, | |
| "loss": 0.3175, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.5243870967741936, | |
| "grad_norm": 0.6313288013084732, | |
| "learning_rate": 5.446350286148471e-07, | |
| "loss": 0.3116, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.5264516129032258, | |
| "grad_norm": 0.6493083937647598, | |
| "learning_rate": 5.410438933898127e-07, | |
| "loss": 0.3128, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.5285161290322581, | |
| "grad_norm": 0.6041537743593429, | |
| "learning_rate": 5.374506252575612e-07, | |
| "loss": 0.3188, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.5305806451612903, | |
| "grad_norm": 0.6454388738856257, | |
| "learning_rate": 5.338554109476379e-07, | |
| "loss": 0.3204, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.5326451612903226, | |
| "grad_norm": 0.6440197687114312, | |
| "learning_rate": 5.302584372907236e-07, | |
| "loss": 0.3187, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.5347096774193548, | |
| "grad_norm": 0.6554355554054035, | |
| "learning_rate": 5.266598912089259e-07, | |
| "loss": 0.3186, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.5367741935483871, | |
| "grad_norm": 0.6155584050496433, | |
| "learning_rate": 5.230599597060666e-07, | |
| "loss": 0.3155, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.5388387096774193, | |
| "grad_norm": 0.6242689232949261, | |
| "learning_rate": 5.194588298579623e-07, | |
| "loss": 0.3166, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.5409032258064516, | |
| "grad_norm": 0.631216431453038, | |
| "learning_rate": 5.158566888027038e-07, | |
| "loss": 0.312, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.5429677419354839, | |
| "grad_norm": 0.6119968685734067, | |
| "learning_rate": 5.122537237309305e-07, | |
| "loss": 0.3124, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.5450322580645162, | |
| "grad_norm": 0.6172652792931007, | |
| "learning_rate": 5.086501218761033e-07, | |
| "loss": 0.3101, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.5470967741935484, | |
| "grad_norm": 0.6270520847073866, | |
| "learning_rate": 5.050460705047742e-07, | |
| "loss": 0.3091, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.5491612903225807, | |
| "grad_norm": 0.6576304505675945, | |
| "learning_rate": 5.014417569068554e-07, | |
| "loss": 0.3165, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.5512258064516129, | |
| "grad_norm": 0.6442263821147718, | |
| "learning_rate": 4.978373683858858e-07, | |
| "loss": 0.3171, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.5532903225806451, | |
| "grad_norm": 0.6108213268100124, | |
| "learning_rate": 4.942330922492975e-07, | |
| "loss": 0.3177, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.5553548387096774, | |
| "grad_norm": 0.615217814445509, | |
| "learning_rate": 4.906291157986834e-07, | |
| "loss": 0.3163, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.5574193548387096, | |
| "grad_norm": 0.6172453178635782, | |
| "learning_rate": 4.870256263200616e-07, | |
| "loss": 0.3121, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.5594838709677419, | |
| "grad_norm": 0.5996732217672011, | |
| "learning_rate": 4.834228110741447e-07, | |
| "loss": 0.3234, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.5615483870967742, | |
| "grad_norm": 0.6653609799045959, | |
| "learning_rate": 4.798208572866074e-07, | |
| "loss": 0.3193, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.5636129032258065, | |
| "grad_norm": 0.6395915777401148, | |
| "learning_rate": 4.762199521383575e-07, | |
| "loss": 0.3195, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.5656774193548387, | |
| "grad_norm": 0.6135325202391676, | |
| "learning_rate": 4.72620282755809e-07, | |
| "loss": 0.3175, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.567741935483871, | |
| "grad_norm": 0.6284093608378871, | |
| "learning_rate": 4.690220362011573e-07, | |
| "loss": 0.3145, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.5698064516129032, | |
| "grad_norm": 0.6157411851456424, | |
| "learning_rate": 4.6542539946265823e-07, | |
| "loss": 0.3135, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.5718709677419355, | |
| "grad_norm": 0.6599419221635924, | |
| "learning_rate": 4.618305594449113e-07, | |
| "loss": 0.3158, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.5739354838709677, | |
| "grad_norm": 0.6238143697793432, | |
| "learning_rate": 4.5823770295914706e-07, | |
| "loss": 0.3126, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.576, | |
| "grad_norm": 0.6312415400069866, | |
| "learning_rate": 4.5464701671351815e-07, | |
| "loss": 0.3176, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.5780645161290323, | |
| "grad_norm": 0.6170664554211615, | |
| "learning_rate": 4.5105868730339785e-07, | |
| "loss": 0.3125, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.5801290322580646, | |
| "grad_norm": 0.6352871069335873, | |
| "learning_rate": 4.4747290120168316e-07, | |
| "loss": 0.3147, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.5821935483870968, | |
| "grad_norm": 0.6205499456241415, | |
| "learning_rate": 4.438898447491036e-07, | |
| "loss": 0.3155, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.584258064516129, | |
| "grad_norm": 0.6546974655194456, | |
| "learning_rate": 4.403097041445395e-07, | |
| "loss": 0.316, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.5863225806451613, | |
| "grad_norm": 0.6331409057896509, | |
| "learning_rate": 4.367326654353436e-07, | |
| "loss": 0.3153, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.5883870967741935, | |
| "grad_norm": 0.6541263665072116, | |
| "learning_rate": 4.331589145076746e-07, | |
| "loss": 0.319, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.5904516129032258, | |
| "grad_norm": 0.6523571212271052, | |
| "learning_rate": 4.295886370768367e-07, | |
| "loss": 0.3196, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.592516129032258, | |
| "grad_norm": 0.6166147769430033, | |
| "learning_rate": 4.2602201867762875e-07, | |
| "loss": 0.3068, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.5945806451612903, | |
| "grad_norm": 0.6055985194256791, | |
| "learning_rate": 4.224592446547024e-07, | |
| "loss": 0.3106, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.5966451612903226, | |
| "grad_norm": 0.6227015026832325, | |
| "learning_rate": 4.1890050015293036e-07, | |
| "loss": 0.3178, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.5987096774193549, | |
| "grad_norm": 0.623960880562434, | |
| "learning_rate": 4.153459701077856e-07, | |
| "loss": 0.3159, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.6007741935483871, | |
| "grad_norm": 0.6756131349407722, | |
| "learning_rate": 4.117958392357303e-07, | |
| "loss": 0.3207, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.6028387096774194, | |
| "grad_norm": 0.6506140494701154, | |
| "learning_rate": 4.082502920246173e-07, | |
| "loss": 0.3096, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.6049032258064516, | |
| "grad_norm": 0.6535456937528376, | |
| "learning_rate": 4.047095127241026e-07, | |
| "loss": 0.3197, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.6069677419354839, | |
| "grad_norm": 0.6565191550520586, | |
| "learning_rate": 4.011736853360702e-07, | |
| "loss": 0.321, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.6090322580645161, | |
| "grad_norm": 0.59801785191147, | |
| "learning_rate": 3.976429936050709e-07, | |
| "loss": 0.3141, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.6110967741935484, | |
| "grad_norm": 0.628166782801977, | |
| "learning_rate": 3.941176210087737e-07, | |
| "loss": 0.3134, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.6131612903225806, | |
| "grad_norm": 0.6702663789481017, | |
| "learning_rate": 3.905977507484304e-07, | |
| "loss": 0.3168, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.615225806451613, | |
| "grad_norm": 0.6225856950934301, | |
| "learning_rate": 3.870835657393558e-07, | |
| "loss": 0.3209, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.6172903225806452, | |
| "grad_norm": 0.6505865990947035, | |
| "learning_rate": 3.8357524860142205e-07, | |
| "loss": 0.3152, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.6193548387096774, | |
| "grad_norm": 0.6074929984057879, | |
| "learning_rate": 3.8007298164956866e-07, | |
| "loss": 0.3203, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.6214193548387097, | |
| "grad_norm": 0.6272149481166996, | |
| "learning_rate": 3.765769468843284e-07, | |
| "loss": 0.3028, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.6234838709677419, | |
| "grad_norm": 0.61659281354388, | |
| "learning_rate": 3.7308732598236896e-07, | |
| "loss": 0.312, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.6255483870967742, | |
| "grad_norm": 0.6419204625089964, | |
| "learning_rate": 3.696043002870521e-07, | |
| "loss": 0.3116, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.6276129032258064, | |
| "grad_norm": 0.6441099222868198, | |
| "learning_rate": 3.6612805079900954e-07, | |
| "loss": 0.3147, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.6296774193548387, | |
| "grad_norm": 0.6426841231722075, | |
| "learning_rate": 3.6265875816673784e-07, | |
| "loss": 0.3146, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.6317419354838709, | |
| "grad_norm": 0.6358867549576873, | |
| "learning_rate": 3.591966026772096e-07, | |
| "loss": 0.3109, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.6338064516129033, | |
| "grad_norm": 0.6217305627910245, | |
| "learning_rate": 3.557417642465055e-07, | |
| "loss": 0.3161, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.6358709677419355, | |
| "grad_norm": 0.6172913230548381, | |
| "learning_rate": 3.522944224104643e-07, | |
| "loss": 0.3182, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.6379354838709678, | |
| "grad_norm": 0.6312545282019393, | |
| "learning_rate": 3.4885475631535253e-07, | |
| "loss": 0.3234, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.6822374675039082, | |
| "learning_rate": 3.4542294470855626e-07, | |
| "loss": 0.3122, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.6420645161290323, | |
| "grad_norm": 0.6238435801987837, | |
| "learning_rate": 3.4199916592929055e-07, | |
| "loss": 0.3131, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.6441290322580645, | |
| "grad_norm": 0.6362639512522529, | |
| "learning_rate": 3.3858359789933313e-07, | |
| "loss": 0.3157, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 0.6461935483870968, | |
| "grad_norm": 0.6133147083090063, | |
| "learning_rate": 3.351764181137771e-07, | |
| "loss": 0.3176, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 0.648258064516129, | |
| "grad_norm": 0.6129689059138792, | |
| "learning_rate": 3.3177780363180923e-07, | |
| "loss": 0.3154, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 0.6503225806451612, | |
| "grad_norm": 0.6496128305000797, | |
| "learning_rate": 3.2838793106750625e-07, | |
| "loss": 0.3137, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.6523870967741936, | |
| "grad_norm": 0.6396028987013508, | |
| "learning_rate": 3.250069765806586e-07, | |
| "loss": 0.3141, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 0.6544516129032258, | |
| "grad_norm": 0.6530804143564158, | |
| "learning_rate": 3.2163511586761566e-07, | |
| "loss": 0.3157, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 0.6565161290322581, | |
| "grad_norm": 0.612114348974523, | |
| "learning_rate": 3.1827252415215454e-07, | |
| "loss": 0.3182, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 0.6585806451612903, | |
| "grad_norm": 0.6191950050314984, | |
| "learning_rate": 3.149193761763764e-07, | |
| "loss": 0.3131, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 0.6606451612903226, | |
| "grad_norm": 0.6335743458257347, | |
| "learning_rate": 3.115758461916229e-07, | |
| "loss": 0.3122, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.6627096774193548, | |
| "grad_norm": 0.6081224963128786, | |
| "learning_rate": 3.0824210794942333e-07, | |
| "loss": 0.3143, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 0.6647741935483871, | |
| "grad_norm": 0.6257725158537328, | |
| "learning_rate": 3.049183346924643e-07, | |
| "loss": 0.3154, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 0.6668387096774193, | |
| "grad_norm": 0.6652587310323215, | |
| "learning_rate": 3.016046991455865e-07, | |
| "loss": 0.3087, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 0.6689032258064516, | |
| "grad_norm": 0.6178973896870249, | |
| "learning_rate": 2.983013735068105e-07, | |
| "loss": 0.3179, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 0.6709677419354839, | |
| "grad_norm": 0.6224226055440618, | |
| "learning_rate": 2.9500852943838603e-07, | |
| "loss": 0.3091, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.6730322580645162, | |
| "grad_norm": 0.6736812063475199, | |
| "learning_rate": 2.9172633805787293e-07, | |
| "loss": 0.319, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 0.6750967741935484, | |
| "grad_norm": 0.625725567804461, | |
| "learning_rate": 2.88454969929248e-07, | |
| "loss": 0.3085, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 0.6771612903225807, | |
| "grad_norm": 0.6332936271071712, | |
| "learning_rate": 2.851945950540419e-07, | |
| "loss": 0.3163, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 0.6792258064516129, | |
| "grad_norm": 0.6468060996640993, | |
| "learning_rate": 2.819453828625038e-07, | |
| "loss": 0.3181, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.6812903225806451, | |
| "grad_norm": 0.6444594141879042, | |
| "learning_rate": 2.787075022047981e-07, | |
| "loss": 0.3166, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.6833548387096774, | |
| "grad_norm": 0.618461769009295, | |
| "learning_rate": 2.754811213422288e-07, | |
| "loss": 0.3173, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.6854193548387096, | |
| "grad_norm": 0.6041385090076459, | |
| "learning_rate": 2.722664079384954e-07, | |
| "loss": 0.3114, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.6874838709677419, | |
| "grad_norm": 0.6119130627331196, | |
| "learning_rate": 2.6906352905098116e-07, | |
| "loss": 0.314, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.6895483870967742, | |
| "grad_norm": 0.6480908972345034, | |
| "learning_rate": 2.658726511220712e-07, | |
| "loss": 0.3199, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.6916129032258065, | |
| "grad_norm": 0.6353300382456004, | |
| "learning_rate": 2.6269393997050205e-07, | |
| "loss": 0.3147, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.6936774193548387, | |
| "grad_norm": 0.6192059686376392, | |
| "learning_rate": 2.595275607827457e-07, | |
| "loss": 0.3129, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.695741935483871, | |
| "grad_norm": 0.6089960449636171, | |
| "learning_rate": 2.563736781044262e-07, | |
| "loss": 0.3092, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.6978064516129032, | |
| "grad_norm": 0.6266402041271057, | |
| "learning_rate": 2.5323245583176667e-07, | |
| "loss": 0.313, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.6998709677419355, | |
| "grad_norm": 0.5940796344475668, | |
| "learning_rate": 2.5010405720307355e-07, | |
| "loss": 0.3119, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.7019354838709677, | |
| "grad_norm": 0.6353842389907374, | |
| "learning_rate": 2.469886447902541e-07, | |
| "loss": 0.3152, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.704, | |
| "grad_norm": 0.6271834587115767, | |
| "learning_rate": 2.438863804903666e-07, | |
| "loss": 0.3159, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.7060645161290322, | |
| "grad_norm": 0.6335854782909629, | |
| "learning_rate": 2.4079742551720887e-07, | |
| "loss": 0.3103, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.7081290322580646, | |
| "grad_norm": 0.6645732860198468, | |
| "learning_rate": 2.3772194039293896e-07, | |
| "loss": 0.3158, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.7101935483870968, | |
| "grad_norm": 0.6288245840777608, | |
| "learning_rate": 2.3466008493973477e-07, | |
| "loss": 0.3148, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.712258064516129, | |
| "grad_norm": 0.6387607278411881, | |
| "learning_rate": 2.3161201827148725e-07, | |
| "loss": 0.3202, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.7143225806451613, | |
| "grad_norm": 0.6231283283369418, | |
| "learning_rate": 2.2857789878553309e-07, | |
| "loss": 0.3166, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.7163870967741935, | |
| "grad_norm": 0.5952558586464339, | |
| "learning_rate": 2.2555788415442288e-07, | |
| "loss": 0.3119, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.7184516129032258, | |
| "grad_norm": 0.6660428513391485, | |
| "learning_rate": 2.22552131317727e-07, | |
| "loss": 0.3133, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.720516129032258, | |
| "grad_norm": 0.6378788981156114, | |
| "learning_rate": 2.1956079647388025e-07, | |
| "loss": 0.3057, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.7225806451612903, | |
| "grad_norm": 0.6333103274834864, | |
| "learning_rate": 2.165840350720655e-07, | |
| "loss": 0.3172, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.7246451612903225, | |
| "grad_norm": 0.6111454843884092, | |
| "learning_rate": 2.1362200180413481e-07, | |
| "loss": 0.3135, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.7267096774193549, | |
| "grad_norm": 0.6205327964139117, | |
| "learning_rate": 2.1067485059657032e-07, | |
| "loss": 0.3185, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.7287741935483871, | |
| "grad_norm": 0.6699284667407549, | |
| "learning_rate": 2.0774273460248577e-07, | |
| "loss": 0.3204, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.7308387096774194, | |
| "grad_norm": 0.6179110137346477, | |
| "learning_rate": 2.0482580619366796e-07, | |
| "loss": 0.3112, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.7329032258064516, | |
| "grad_norm": 0.6074917560720133, | |
| "learning_rate": 2.019242169526581e-07, | |
| "loss": 0.3139, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.7349677419354839, | |
| "grad_norm": 0.6310306529351029, | |
| "learning_rate": 1.9903811766487426e-07, | |
| "loss": 0.3119, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.7370322580645161, | |
| "grad_norm": 0.664083184766849, | |
| "learning_rate": 1.9616765831077603e-07, | |
| "loss": 0.3216, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.7390967741935484, | |
| "grad_norm": 0.6364296679991407, | |
| "learning_rate": 1.9331298805807095e-07, | |
| "loss": 0.3201, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.7411612903225806, | |
| "grad_norm": 0.6286717380476362, | |
| "learning_rate": 1.9047425525396161e-07, | |
| "loss": 0.3112, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.743225806451613, | |
| "grad_norm": 0.6346887009435065, | |
| "learning_rate": 1.876516074174379e-07, | |
| "loss": 0.3106, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.7452903225806452, | |
| "grad_norm": 0.6085474387411121, | |
| "learning_rate": 1.848451912316103e-07, | |
| "loss": 0.3152, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.7473548387096774, | |
| "grad_norm": 0.6250309615399917, | |
| "learning_rate": 1.8205515253608688e-07, | |
| "loss": 0.3118, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.7494193548387097, | |
| "grad_norm": 0.6329400757803024, | |
| "learning_rate": 1.792816363193952e-07, | |
| "loss": 0.316, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.7514838709677419, | |
| "grad_norm": 0.6580450917336371, | |
| "learning_rate": 1.7652478671144755e-07, | |
| "loss": 0.3131, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.7535483870967742, | |
| "grad_norm": 0.6388637187577192, | |
| "learning_rate": 1.7378474697605128e-07, | |
| "loss": 0.3177, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.7556129032258064, | |
| "grad_norm": 0.6143703605590203, | |
| "learning_rate": 1.7106165950346318e-07, | |
| "loss": 0.3191, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.7576774193548387, | |
| "grad_norm": 0.6273664608270475, | |
| "learning_rate": 1.683556658029903e-07, | |
| "loss": 0.3125, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.7597419354838709, | |
| "grad_norm": 0.6586710436884331, | |
| "learning_rate": 1.656669064956368e-07, | |
| "loss": 0.3105, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.7618064516129033, | |
| "grad_norm": 0.6461005253964683, | |
| "learning_rate": 1.6299552130679578e-07, | |
| "loss": 0.3175, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.7638709677419355, | |
| "grad_norm": 0.6473704793930669, | |
| "learning_rate": 1.6034164905898768e-07, | |
| "loss": 0.3158, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.7659354838709678, | |
| "grad_norm": 0.6887318889016925, | |
| "learning_rate": 1.5770542766464773e-07, | |
| "loss": 0.3158, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.768, | |
| "grad_norm": 0.6145074649843777, | |
| "learning_rate": 1.5508699411895738e-07, | |
| "loss": 0.3189, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.7700645161290323, | |
| "grad_norm": 0.6117699994497662, | |
| "learning_rate": 1.524864844927266e-07, | |
| "loss": 0.3192, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.7721290322580645, | |
| "grad_norm": 0.6442807746229446, | |
| "learning_rate": 1.4990403392532159e-07, | |
| "loss": 0.3091, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.7741935483870968, | |
| "grad_norm": 0.6232325765752856, | |
| "learning_rate": 1.473397766176431e-07, | |
| "loss": 0.3174, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.776258064516129, | |
| "grad_norm": 0.6579855922539816, | |
| "learning_rate": 1.4479384582515153e-07, | |
| "loss": 0.3183, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.7783225806451612, | |
| "grad_norm": 0.6400471085265736, | |
| "learning_rate": 1.4226637385094247e-07, | |
| "loss": 0.315, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.7803870967741936, | |
| "grad_norm": 0.6339992409795476, | |
| "learning_rate": 1.3975749203887228e-07, | |
| "loss": 0.318, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.7824516129032258, | |
| "grad_norm": 0.6614920296989713, | |
| "learning_rate": 1.3726733076673085e-07, | |
| "loss": 0.3155, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.7845161290322581, | |
| "grad_norm": 0.6149620463556447, | |
| "learning_rate": 1.3479601943946761e-07, | |
| "loss": 0.3093, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.7865806451612903, | |
| "grad_norm": 0.642745944566185, | |
| "learning_rate": 1.323436864824664e-07, | |
| "loss": 0.3171, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.7886451612903226, | |
| "grad_norm": 0.6152220466779936, | |
| "learning_rate": 1.299104593348721e-07, | |
| "loss": 0.3207, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.7907096774193548, | |
| "grad_norm": 0.6429579966758033, | |
| "learning_rate": 1.2749646444296703e-07, | |
| "loss": 0.316, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.7927741935483871, | |
| "grad_norm": 0.6338893602645223, | |
| "learning_rate": 1.2510182725360086e-07, | |
| "loss": 0.3077, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.7948387096774193, | |
| "grad_norm": 0.6092406969812145, | |
| "learning_rate": 1.2272667220767158e-07, | |
| "loss": 0.32, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.7969032258064516, | |
| "grad_norm": 0.609726533631423, | |
| "learning_rate": 1.2037112273365818e-07, | |
| "loss": 0.3127, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.7989677419354839, | |
| "grad_norm": 0.6419258740410181, | |
| "learning_rate": 1.1803530124120714e-07, | |
| "loss": 0.3141, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.8010322580645162, | |
| "grad_norm": 0.623356524162269, | |
| "learning_rate": 1.157193291147705e-07, | |
| "loss": 0.3133, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.8030967741935484, | |
| "grad_norm": 0.6205282254055249, | |
| "learning_rate": 1.1342332670729882e-07, | |
| "loss": 0.3099, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.8051612903225807, | |
| "grad_norm": 0.6371775065392936, | |
| "learning_rate": 1.1114741333398592e-07, | |
| "loss": 0.3152, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.8072258064516129, | |
| "grad_norm": 0.6100575734172149, | |
| "learning_rate": 1.0889170726606933e-07, | |
| "loss": 0.3106, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.8092903225806451, | |
| "grad_norm": 0.632559732600231, | |
| "learning_rate": 1.066563257246838e-07, | |
| "loss": 0.3088, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.8113548387096774, | |
| "grad_norm": 0.6181373045584015, | |
| "learning_rate": 1.0444138487476944e-07, | |
| "loss": 0.3138, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.8134193548387096, | |
| "grad_norm": 0.6259151100943298, | |
| "learning_rate": 1.0224699981903517e-07, | |
| "loss": 0.3177, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.8154838709677419, | |
| "grad_norm": 0.6117914186807475, | |
| "learning_rate": 1.0007328459197778e-07, | |
| "loss": 0.3162, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.8175483870967742, | |
| "grad_norm": 0.6387066615699482, | |
| "learning_rate": 9.792035215395556e-08, | |
| "loss": 0.3156, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.8196129032258065, | |
| "grad_norm": 0.6289156638043008, | |
| "learning_rate": 9.578831438531776e-08, | |
| "loss": 0.3216, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.8216774193548387, | |
| "grad_norm": 0.6494192268070564, | |
| "learning_rate": 9.3677282080591e-08, | |
| "loss": 0.3155, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.823741935483871, | |
| "grad_norm": 0.6152684331534928, | |
| "learning_rate": 9.158736494272179e-08, | |
| "loss": 0.3142, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.8258064516129032, | |
| "grad_norm": 0.6252970551532324, | |
| "learning_rate": 8.951867157737558e-08, | |
| "loss": 0.3206, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.8278709677419355, | |
| "grad_norm": 0.6124835286826313, | |
| "learning_rate": 8.747130948729226e-08, | |
| "loss": 0.3118, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.8299354838709677, | |
| "grad_norm": 0.6416414008426811, | |
| "learning_rate": 8.544538506670074e-08, | |
| "loss": 0.3101, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.832, | |
| "grad_norm": 0.6334705227262246, | |
| "learning_rate": 8.344100359578904e-08, | |
| "loss": 0.311, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.8340645161290322, | |
| "grad_norm": 0.603175358881232, | |
| "learning_rate": 8.145826923523358e-08, | |
| "loss": 0.3051, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.8361290322580646, | |
| "grad_norm": 0.6298247075384997, | |
| "learning_rate": 7.949728502078668e-08, | |
| "loss": 0.3103, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.8381935483870968, | |
| "grad_norm": 0.6579316907441995, | |
| "learning_rate": 7.755815285792172e-08, | |
| "loss": 0.3227, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.840258064516129, | |
| "grad_norm": 0.6066666948849365, | |
| "learning_rate": 7.564097351653742e-08, | |
| "loss": 0.3249, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.8423225806451613, | |
| "grad_norm": 0.6180551794332899, | |
| "learning_rate": 7.374584662572142e-08, | |
| "loss": 0.319, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.8443870967741935, | |
| "grad_norm": 0.5883783638702929, | |
| "learning_rate": 7.187287066857289e-08, | |
| "loss": 0.3085, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.8464516129032258, | |
| "grad_norm": 0.6473801259147952, | |
| "learning_rate": 7.002214297708481e-08, | |
| "loss": 0.3087, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.848516129032258, | |
| "grad_norm": 0.6148667467318439, | |
| "learning_rate": 6.819375972708536e-08, | |
| "loss": 0.3092, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.8505806451612903, | |
| "grad_norm": 0.6348250486724527, | |
| "learning_rate": 6.6387815933241e-08, | |
| "loss": 0.3143, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.8526451612903225, | |
| "grad_norm": 0.6315802775439991, | |
| "learning_rate": 6.460440544411777e-08, | |
| "loss": 0.3143, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.8547096774193549, | |
| "grad_norm": 0.6076346417593953, | |
| "learning_rate": 6.284362093730545e-08, | |
| "loss": 0.3105, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.8567741935483871, | |
| "grad_norm": 0.5962317858372677, | |
| "learning_rate": 6.110555391460026e-08, | |
| "loss": 0.3152, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.8588387096774194, | |
| "grad_norm": 0.5927251015186508, | |
| "learning_rate": 5.9390294697251045e-08, | |
| "loss": 0.3108, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.8609032258064516, | |
| "grad_norm": 0.6066991888644118, | |
| "learning_rate": 5.7697932421264415e-08, | |
| "loss": 0.3037, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.8629677419354839, | |
| "grad_norm": 0.6003227795214134, | |
| "learning_rate": 5.602855503277376e-08, | |
| "loss": 0.3126, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.8650322580645161, | |
| "grad_norm": 0.60428884775973, | |
| "learning_rate": 5.438224928346791e-08, | |
| "loss": 0.3138, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.8670967741935484, | |
| "grad_norm": 0.62060666748443, | |
| "learning_rate": 5.275910072608408e-08, | |
| "loss": 0.3187, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.8691612903225806, | |
| "grad_norm": 0.6290563694560117, | |
| "learning_rate": 5.115919370996097e-08, | |
| "loss": 0.3181, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.8712258064516128, | |
| "grad_norm": 0.5932518096375957, | |
| "learning_rate": 4.9582611376655924e-08, | |
| "loss": 0.3231, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.8732903225806452, | |
| "grad_norm": 0.6352198420824648, | |
| "learning_rate": 4.8029435655624785e-08, | |
| "loss": 0.309, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.8753548387096775, | |
| "grad_norm": 0.6188377103818619, | |
| "learning_rate": 4.6499747259963254e-08, | |
| "loss": 0.3138, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.8774193548387097, | |
| "grad_norm": 0.6387211687173309, | |
| "learning_rate": 4.499362568221327e-08, | |
| "loss": 0.3072, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.8794838709677419, | |
| "grad_norm": 0.6469413697906533, | |
| "learning_rate": 4.351114919023197e-08, | |
| "loss": 0.3174, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.8815483870967742, | |
| "grad_norm": 0.609278472353098, | |
| "learning_rate": 4.205239482312445e-08, | |
| "loss": 0.3127, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.8836129032258064, | |
| "grad_norm": 0.6106614475011715, | |
| "learning_rate": 4.0617438387239746e-08, | |
| "loss": 0.3169, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.8856774193548387, | |
| "grad_norm": 0.5880264333058545, | |
| "learning_rate": 3.9206354452232135e-08, | |
| "loss": 0.3103, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.8877419354838709, | |
| "grad_norm": 0.6345936372283331, | |
| "learning_rate": 3.7819216347185715e-08, | |
| "loss": 0.3161, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.8898064516129032, | |
| "grad_norm": 0.6267523098231387, | |
| "learning_rate": 3.6456096156803616e-08, | |
| "loss": 0.3112, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.8918709677419355, | |
| "grad_norm": 0.5986450915786209, | |
| "learning_rate": 3.5117064717662406e-08, | |
| "loss": 0.3096, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.8939354838709678, | |
| "grad_norm": 0.620818430567561, | |
| "learning_rate": 3.380219161453063e-08, | |
| "loss": 0.3191, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.896, | |
| "grad_norm": 0.6536206656078076, | |
| "learning_rate": 3.251154517675264e-08, | |
| "loss": 0.3132, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.8980645161290323, | |
| "grad_norm": 0.6620246651441947, | |
| "learning_rate": 3.124519247469814e-08, | |
| "loss": 0.3135, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.9001290322580645, | |
| "grad_norm": 0.6432332552038761, | |
| "learning_rate": 3.0003199316276486e-08, | |
| "loss": 0.3086, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.9021935483870968, | |
| "grad_norm": 0.6157763583488753, | |
| "learning_rate": 2.8785630243517156e-08, | |
| "loss": 0.3074, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.904258064516129, | |
| "grad_norm": 0.6365850518212122, | |
| "learning_rate": 2.759254852921522e-08, | |
| "loss": 0.3137, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.9063225806451612, | |
| "grad_norm": 0.6315485223419045, | |
| "learning_rate": 2.6424016173643816e-08, | |
| "loss": 0.3132, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.9083870967741936, | |
| "grad_norm": 0.6373095133174064, | |
| "learning_rate": 2.5280093901331957e-08, | |
| "loss": 0.3072, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.9104516129032258, | |
| "grad_norm": 0.6017307890815197, | |
| "learning_rate": 2.4160841157908894e-08, | |
| "loss": 0.3162, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.9125161290322581, | |
| "grad_norm": 0.6465531889379581, | |
| "learning_rate": 2.3066316107014984e-08, | |
| "loss": 0.3212, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.9145806451612903, | |
| "grad_norm": 0.6029856939651695, | |
| "learning_rate": 2.199657562727919e-08, | |
| "loss": 0.3171, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.9166451612903226, | |
| "grad_norm": 0.644409396257745, | |
| "learning_rate": 2.0951675309363038e-08, | |
| "loss": 0.3187, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.9187096774193548, | |
| "grad_norm": 0.6526376809487803, | |
| "learning_rate": 1.9931669453072064e-08, | |
| "loss": 0.3228, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.9207741935483871, | |
| "grad_norm": 0.6345017488836956, | |
| "learning_rate": 1.893661106453387e-08, | |
| "loss": 0.3179, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.9228387096774193, | |
| "grad_norm": 0.6421680406292284, | |
| "learning_rate": 1.7966551853443813e-08, | |
| "loss": 0.311, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.9249032258064516, | |
| "grad_norm": 0.6535141196988801, | |
| "learning_rate": 1.7021542230377495e-08, | |
| "loss": 0.3117, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.9269677419354839, | |
| "grad_norm": 0.6516997261163685, | |
| "learning_rate": 1.610163130417119e-08, | |
| "loss": 0.3164, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.9290322580645162, | |
| "grad_norm": 0.6139278478128292, | |
| "learning_rate": 1.520686687937006e-08, | |
| "loss": 0.3119, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.9310967741935484, | |
| "grad_norm": 0.6280704101718017, | |
| "learning_rate": 1.4337295453743848e-08, | |
| "loss": 0.3146, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.9331612903225807, | |
| "grad_norm": 0.6177747730934665, | |
| "learning_rate": 1.3492962215870208e-08, | |
| "loss": 0.3151, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.9352258064516129, | |
| "grad_norm": 0.6311802153022037, | |
| "learning_rate": 1.2673911042786812e-08, | |
| "loss": 0.3147, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.9372903225806452, | |
| "grad_norm": 0.6298140193496882, | |
| "learning_rate": 1.188018449771111e-08, | |
| "loss": 0.3114, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.9393548387096774, | |
| "grad_norm": 0.6033792966514346, | |
| "learning_rate": 1.1111823827828438e-08, | |
| "loss": 0.3158, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.9414193548387096, | |
| "grad_norm": 0.5961880904551417, | |
| "learning_rate": 1.0368868962148446e-08, | |
| "loss": 0.3117, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.9434838709677419, | |
| "grad_norm": 0.6557939191645362, | |
| "learning_rate": 9.651358509430385e-09, | |
| "loss": 0.3194, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.9455483870967742, | |
| "grad_norm": 0.628397602387223, | |
| "learning_rate": 8.959329756176359e-09, | |
| "loss": 0.3122, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.9476129032258065, | |
| "grad_norm": 0.6294158507698498, | |
| "learning_rate": 8.292818664694223e-09, | |
| "loss": 0.3097, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.9496774193548387, | |
| "grad_norm": 0.6329054623458709, | |
| "learning_rate": 7.651859871228072e-09, | |
| "loss": 0.3171, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.951741935483871, | |
| "grad_norm": 0.6192025568076179, | |
| "learning_rate": 7.0364866841589045e-09, | |
| "loss": 0.3169, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.9538064516129032, | |
| "grad_norm": 0.6510490608752881, | |
| "learning_rate": 6.446731082273449e-09, | |
| "loss": 0.3152, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.9558709677419355, | |
| "grad_norm": 0.6509717448947699, | |
| "learning_rate": 5.8826237131022196e-09, | |
| "loss": 0.3161, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.9579354838709677, | |
| "grad_norm": 0.6267847738737716, | |
| "learning_rate": 5.344193891327286e-09, | |
| "loss": 0.3135, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.6072626235316539, | |
| "learning_rate": 4.831469597258331e-09, | |
| "loss": 0.3115, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.9620645161290322, | |
| "grad_norm": 0.62913760686652, | |
| "learning_rate": 4.344477475379027e-09, | |
| "loss": 0.3158, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.9641290322580646, | |
| "grad_norm": 0.6237025476138797, | |
| "learning_rate": 3.883242832962319e-09, | |
| "loss": 0.3185, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.9661935483870968, | |
| "grad_norm": 0.6386110886092738, | |
| "learning_rate": 3.4477896387552497e-09, | |
| "loss": 0.3158, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.9682580645161291, | |
| "grad_norm": 0.6128452175027437, | |
| "learning_rate": 3.0381405217333457e-09, | |
| "loss": 0.3117, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.9703225806451613, | |
| "grad_norm": 0.6373113791110392, | |
| "learning_rate": 2.6543167699247248e-09, | |
| "loss": 0.3191, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.9723870967741935, | |
| "grad_norm": 0.6687919646494422, | |
| "learning_rate": 2.2963383293039264e-09, | |
| "loss": 0.3213, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.9744516129032258, | |
| "grad_norm": 0.6488120079489716, | |
| "learning_rate": 1.964223802755238e-09, | |
| "loss": 0.3175, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.976516129032258, | |
| "grad_norm": 0.637435741828532, | |
| "learning_rate": 1.6579904491059726e-09, | |
| "loss": 0.3084, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.9785806451612903, | |
| "grad_norm": 0.5999360130685788, | |
| "learning_rate": 1.3776541822297926e-09, | |
| "loss": 0.3189, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.9806451612903225, | |
| "grad_norm": 0.619868587243893, | |
| "learning_rate": 1.1232295702193751e-09, | |
| "loss": 0.3165, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.9827096774193549, | |
| "grad_norm": 0.6601674936337429, | |
| "learning_rate": 8.947298346296816e-10, | |
| "loss": 0.3232, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.9847741935483871, | |
| "grad_norm": 0.6254003815023664, | |
| "learning_rate": 6.921668497907873e-10, | |
| "loss": 0.3146, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.9868387096774194, | |
| "grad_norm": 0.6500214007015074, | |
| "learning_rate": 5.155511421906511e-10, | |
| "loss": 0.3148, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.9889032258064516, | |
| "grad_norm": 0.6187202682538294, | |
| "learning_rate": 3.64891889928276e-10, | |
| "loss": 0.3151, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.9909677419354839, | |
| "grad_norm": 0.6323548620934831, | |
| "learning_rate": 2.4019692223675727e-10, | |
| "loss": 0.3114, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.9930322580645161, | |
| "grad_norm": 0.6355942016945179, | |
| "learning_rate": 1.4147271907621883e-10, | |
| "loss": 0.3128, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.9950967741935484, | |
| "grad_norm": 0.6569052859865055, | |
| "learning_rate": 6.87244107974716e-11, | |
| "loss": 0.312, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.9971612903225806, | |
| "grad_norm": 0.6459570341315254, | |
| "learning_rate": 2.1955777874838045e-11, | |
| "loss": 0.317, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.9992258064516129, | |
| "grad_norm": 0.6173828325733423, | |
| "learning_rate": 1.1692507103089334e-12, | |
| "loss": 0.3087, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.9998451612903225, | |
| "step": 4843, | |
| "total_flos": 409696561004544.0, | |
| "train_loss": 0.32205133897914096, | |
| "train_runtime": 161287.2429, | |
| "train_samples_per_second": 7.688, | |
| "train_steps_per_second": 0.03 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 4843, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 409696561004544.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |