diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,65148 @@ +{ + "best_metric": 2.092881679534912, + "best_model_checkpoint": "final_models/laft_lug_phi/checkpoint-6196", + "epoch": 6.0, + "eval_steps": 500, + "global_step": 9294, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0006455777921239509, + "grad_norm": 14.158392189575459, + "learning_rate": 1.5e-07, + "loss": 5.6116, + "step": 1 + }, + { + "epoch": 0.0012911555842479018, + "grad_norm": 15.084177761444046, + "learning_rate": 3e-07, + "loss": 5.5853, + "step": 2 + }, + { + "epoch": 0.001936733376371853, + "grad_norm": 12.751805816476198, + "learning_rate": 4.5e-07, + "loss": 5.751, + "step": 3 + }, + { + "epoch": 0.0025823111684958036, + "grad_norm": 11.601350323846248, + "learning_rate": 6e-07, + "loss": 5.5501, + "step": 4 + }, + { + "epoch": 0.0032278889606197547, + "grad_norm": 14.802055931285869, + "learning_rate": 7.499999999999999e-07, + "loss": 5.8333, + "step": 5 + }, + { + "epoch": 0.003873466752743706, + "grad_norm": 11.309559352160983, + "learning_rate": 9e-07, + "loss": 5.5359, + "step": 6 + }, + { + "epoch": 0.004519044544867657, + "grad_norm": 12.107743199649939, + "learning_rate": 1.05e-06, + "loss": 5.815, + "step": 7 + }, + { + "epoch": 0.005164622336991607, + "grad_norm": 11.649560978079151, + "learning_rate": 1.2e-06, + "loss": 5.7407, + "step": 8 + }, + { + "epoch": 0.005810200129115558, + "grad_norm": 15.353218338808345, + "learning_rate": 1.3499999999999998e-06, + "loss": 5.6328, + "step": 9 + }, + { + "epoch": 0.006455777921239509, + "grad_norm": 10.846040439266313, + "learning_rate": 1.4999999999999998e-06, + "loss": 5.5965, + "step": 10 + }, + { + "epoch": 0.0071013557133634605, + "grad_norm": 10.127810218484433, + "learning_rate": 1.6499999999999997e-06, + "loss": 5.4615, + "step": 11 + }, + { + "epoch": 0.007746933505487412, + "grad_norm": 10.431714291496096, + "learning_rate": 1.8e-06, + "loss": 5.6834, + "step": 12 + }, + { + "epoch": 0.008392511297611363, + "grad_norm": 9.196452603567318, + "learning_rate": 1.9499999999999995e-06, + "loss": 5.3809, + "step": 13 + }, + { + "epoch": 0.009038089089735314, + "grad_norm": 9.539196166379286, + "learning_rate": 2.1e-06, + "loss": 5.4606, + "step": 14 + }, + { + "epoch": 0.009683666881859263, + "grad_norm": 11.901865311818305, + "learning_rate": 2.2499999999999996e-06, + "loss": 5.5288, + "step": 15 + }, + { + "epoch": 0.010329244673983214, + "grad_norm": 9.505001194165317, + "learning_rate": 2.4e-06, + "loss": 5.3139, + "step": 16 + }, + { + "epoch": 0.010974822466107165, + "grad_norm": 8.507874165547681, + "learning_rate": 2.55e-06, + "loss": 5.2705, + "step": 17 + }, + { + "epoch": 0.011620400258231117, + "grad_norm": 9.578022695273086, + "learning_rate": 2.6999999999999996e-06, + "loss": 5.1848, + "step": 18 + }, + { + "epoch": 0.012265978050355068, + "grad_norm": 9.986201757190656, + "learning_rate": 2.85e-06, + "loss": 5.1822, + "step": 19 + }, + { + "epoch": 0.012911555842479019, + "grad_norm": 10.113869866213156, + "learning_rate": 2.9999999999999997e-06, + "loss": 5.1205, + "step": 20 + }, + { + "epoch": 0.01355713363460297, + "grad_norm": 10.111107873279526, + "learning_rate": 3.15e-06, + "loss": 5.0319, + "step": 21 + }, + { + "epoch": 0.014202711426726921, + "grad_norm": 9.587242461962632, + "learning_rate": 3.2999999999999993e-06, + "loss": 5.055, + "step": 22 + }, + { + "epoch": 
0.014848289218850872, + "grad_norm": 9.99255209723234, + "learning_rate": 3.4499999999999996e-06, + "loss": 4.8969, + "step": 23 + }, + { + "epoch": 0.015493867010974823, + "grad_norm": 7.281417590636142, + "learning_rate": 3.6e-06, + "loss": 4.7346, + "step": 24 + }, + { + "epoch": 0.016139444803098774, + "grad_norm": 7.883803911786202, + "learning_rate": 3.7499999999999997e-06, + "loss": 4.9113, + "step": 25 + }, + { + "epoch": 0.016785022595222725, + "grad_norm": 7.596102788273765, + "learning_rate": 3.899999999999999e-06, + "loss": 4.6698, + "step": 26 + }, + { + "epoch": 0.017430600387346677, + "grad_norm": 6.945528645654032, + "learning_rate": 4.049999999999999e-06, + "loss": 4.8191, + "step": 27 + }, + { + "epoch": 0.018076178179470628, + "grad_norm": 6.919216746671974, + "learning_rate": 4.2e-06, + "loss": 4.7983, + "step": 28 + }, + { + "epoch": 0.01872175597159458, + "grad_norm": 6.199068049784946, + "learning_rate": 4.35e-06, + "loss": 4.8828, + "step": 29 + }, + { + "epoch": 0.019367333763718526, + "grad_norm": 7.5209330580747675, + "learning_rate": 4.499999999999999e-06, + "loss": 4.6375, + "step": 30 + }, + { + "epoch": 0.020012911555842477, + "grad_norm": 6.488213429936977, + "learning_rate": 4.6499999999999995e-06, + "loss": 4.7524, + "step": 31 + }, + { + "epoch": 0.02065848934796643, + "grad_norm": 6.76930438966971, + "learning_rate": 4.8e-06, + "loss": 4.4819, + "step": 32 + }, + { + "epoch": 0.02130406714009038, + "grad_norm": 6.092087098506284, + "learning_rate": 4.95e-06, + "loss": 4.5379, + "step": 33 + }, + { + "epoch": 0.02194964493221433, + "grad_norm": 5.495585170311883, + "learning_rate": 5.1e-06, + "loss": 4.655, + "step": 34 + }, + { + "epoch": 0.022595222724338282, + "grad_norm": 6.182870998064873, + "learning_rate": 5.25e-06, + "loss": 4.3167, + "step": 35 + }, + { + "epoch": 0.023240800516462233, + "grad_norm": 6.63023707298776, + "learning_rate": 5.399999999999999e-06, + "loss": 4.484, + "step": 36 + }, + { + "epoch": 0.023886378308586184, + "grad_norm": 5.9296114734560685, + "learning_rate": 5.549999999999999e-06, + "loss": 4.2486, + "step": 37 + }, + { + "epoch": 0.024531956100710135, + "grad_norm": 6.23639622337333, + "learning_rate": 5.7e-06, + "loss": 4.43, + "step": 38 + }, + { + "epoch": 0.025177533892834086, + "grad_norm": 6.746387225482685, + "learning_rate": 5.85e-06, + "loss": 4.2658, + "step": 39 + }, + { + "epoch": 0.025823111684958037, + "grad_norm": 5.700370642203259, + "learning_rate": 5.999999999999999e-06, + "loss": 4.3833, + "step": 40 + }, + { + "epoch": 0.02646868947708199, + "grad_norm": 5.989836509627605, + "learning_rate": 6.1499999999999996e-06, + "loss": 4.4273, + "step": 41 + }, + { + "epoch": 0.02711426726920594, + "grad_norm": 5.094978555694676, + "learning_rate": 6.3e-06, + "loss": 4.1133, + "step": 42 + }, + { + "epoch": 0.02775984506132989, + "grad_norm": 4.58758971941016, + "learning_rate": 6.449999999999999e-06, + "loss": 4.0121, + "step": 43 + }, + { + "epoch": 0.028405422853453842, + "grad_norm": 4.995331141721478, + "learning_rate": 6.599999999999999e-06, + "loss": 4.3336, + "step": 44 + }, + { + "epoch": 0.029051000645577793, + "grad_norm": 4.781544734689018, + "learning_rate": 6.749999999999999e-06, + "loss": 3.9922, + "step": 45 + }, + { + "epoch": 0.029696578437701744, + "grad_norm": 5.196009150995352, + "learning_rate": 6.899999999999999e-06, + "loss": 4.1441, + "step": 46 + }, + { + "epoch": 0.030342156229825695, + "grad_norm": 7.220152205766252, + "learning_rate": 7.049999999999999e-06, + "loss": 4.1244, + 
"step": 47 + }, + { + "epoch": 0.030987734021949646, + "grad_norm": 5.547817833344847, + "learning_rate": 7.2e-06, + "loss": 4.2501, + "step": 48 + }, + { + "epoch": 0.0316333118140736, + "grad_norm": 5.881657059802617, + "learning_rate": 7.35e-06, + "loss": 4.1573, + "step": 49 + }, + { + "epoch": 0.03227888960619755, + "grad_norm": 5.358855445178018, + "learning_rate": 7.499999999999999e-06, + "loss": 4.1125, + "step": 50 + }, + { + "epoch": 0.0329244673983215, + "grad_norm": 5.669452258885774, + "learning_rate": 7.65e-06, + "loss": 4.2051, + "step": 51 + }, + { + "epoch": 0.03357004519044545, + "grad_norm": 6.895764704245376, + "learning_rate": 7.799999999999998e-06, + "loss": 4.1775, + "step": 52 + }, + { + "epoch": 0.0342156229825694, + "grad_norm": 4.559146960017215, + "learning_rate": 7.949999999999998e-06, + "loss": 3.9782, + "step": 53 + }, + { + "epoch": 0.03486120077469335, + "grad_norm": 5.010433610106685, + "learning_rate": 8.099999999999999e-06, + "loss": 3.7051, + "step": 54 + }, + { + "epoch": 0.035506778566817304, + "grad_norm": 4.72301826939047, + "learning_rate": 8.249999999999999e-06, + "loss": 3.9948, + "step": 55 + }, + { + "epoch": 0.036152356358941255, + "grad_norm": 6.1028389281837025, + "learning_rate": 8.4e-06, + "loss": 4.0268, + "step": 56 + }, + { + "epoch": 0.036797934151065206, + "grad_norm": 4.686432817316888, + "learning_rate": 8.55e-06, + "loss": 3.9989, + "step": 57 + }, + { + "epoch": 0.03744351194318916, + "grad_norm": 5.002707263475115, + "learning_rate": 8.7e-06, + "loss": 3.9711, + "step": 58 + }, + { + "epoch": 0.03808908973531311, + "grad_norm": 5.191025661960439, + "learning_rate": 8.849999999999998e-06, + "loss": 3.8395, + "step": 59 + }, + { + "epoch": 0.03873466752743705, + "grad_norm": 4.679797044575768, + "learning_rate": 8.999999999999999e-06, + "loss": 3.7527, + "step": 60 + }, + { + "epoch": 0.039380245319561004, + "grad_norm": 4.4669766357430305, + "learning_rate": 9.149999999999999e-06, + "loss": 3.8446, + "step": 61 + }, + { + "epoch": 0.040025823111684955, + "grad_norm": 5.2706233777104625, + "learning_rate": 9.299999999999999e-06, + "loss": 3.7564, + "step": 62 + }, + { + "epoch": 0.040671400903808906, + "grad_norm": 7.035637197582238, + "learning_rate": 9.45e-06, + "loss": 4.0565, + "step": 63 + }, + { + "epoch": 0.04131697869593286, + "grad_norm": 6.966467693767865, + "learning_rate": 9.6e-06, + "loss": 3.9485, + "step": 64 + }, + { + "epoch": 0.04196255648805681, + "grad_norm": 5.004198561478764, + "learning_rate": 9.75e-06, + "loss": 3.9361, + "step": 65 + }, + { + "epoch": 0.04260813428018076, + "grad_norm": 6.728586351708955, + "learning_rate": 9.9e-06, + "loss": 3.64, + "step": 66 + }, + { + "epoch": 0.04325371207230471, + "grad_norm": 6.046444113360888, + "learning_rate": 1.005e-05, + "loss": 3.8503, + "step": 67 + }, + { + "epoch": 0.04389928986442866, + "grad_norm": 4.232983221039184, + "learning_rate": 1.02e-05, + "loss": 3.7363, + "step": 68 + }, + { + "epoch": 0.04454486765655261, + "grad_norm": 4.6399255469256335, + "learning_rate": 1.035e-05, + "loss": 3.3365, + "step": 69 + }, + { + "epoch": 0.045190445448676564, + "grad_norm": 5.361883756289874, + "learning_rate": 1.05e-05, + "loss": 3.6701, + "step": 70 + }, + { + "epoch": 0.045836023240800515, + "grad_norm": 4.971230976856912, + "learning_rate": 1.0649999999999998e-05, + "loss": 3.8138, + "step": 71 + }, + { + "epoch": 0.046481601032924466, + "grad_norm": 6.451162445035149, + "learning_rate": 1.0799999999999998e-05, + "loss": 3.797, + "step": 72 + }, + { + "epoch": 
0.04712717882504842, + "grad_norm": 4.568797046587955, + "learning_rate": 1.0949999999999998e-05, + "loss": 3.8758, + "step": 73 + }, + { + "epoch": 0.04777275661717237, + "grad_norm": 5.8094073846938725, + "learning_rate": 1.1099999999999999e-05, + "loss": 3.7198, + "step": 74 + }, + { + "epoch": 0.04841833440929632, + "grad_norm": 6.399359005122636, + "learning_rate": 1.1249999999999999e-05, + "loss": 3.8712, + "step": 75 + }, + { + "epoch": 0.04906391220142027, + "grad_norm": 5.196320885554098, + "learning_rate": 1.14e-05, + "loss": 3.4733, + "step": 76 + }, + { + "epoch": 0.04970948999354422, + "grad_norm": 4.868274124799522, + "learning_rate": 1.155e-05, + "loss": 3.544, + "step": 77 + }, + { + "epoch": 0.05035506778566817, + "grad_norm": 4.4106799445996625, + "learning_rate": 1.17e-05, + "loss": 3.5787, + "step": 78 + }, + { + "epoch": 0.051000645577792124, + "grad_norm": 5.6481050241353765, + "learning_rate": 1.1849999999999998e-05, + "loss": 3.6212, + "step": 79 + }, + { + "epoch": 0.051646223369916075, + "grad_norm": 5.703112323711336, + "learning_rate": 1.1999999999999999e-05, + "loss": 3.1904, + "step": 80 + }, + { + "epoch": 0.052291801162040026, + "grad_norm": 5.9692143032264005, + "learning_rate": 1.2149999999999999e-05, + "loss": 3.5106, + "step": 81 + }, + { + "epoch": 0.05293737895416398, + "grad_norm": 5.279543444770511, + "learning_rate": 1.2299999999999999e-05, + "loss": 3.4558, + "step": 82 + }, + { + "epoch": 0.05358295674628793, + "grad_norm": 6.021349873733777, + "learning_rate": 1.245e-05, + "loss": 3.7256, + "step": 83 + }, + { + "epoch": 0.05422853453841188, + "grad_norm": 4.705145178710349, + "learning_rate": 1.26e-05, + "loss": 3.6308, + "step": 84 + }, + { + "epoch": 0.05487411233053583, + "grad_norm": 5.16745535148546, + "learning_rate": 1.275e-05, + "loss": 3.4511, + "step": 85 + }, + { + "epoch": 0.05551969012265978, + "grad_norm": 4.776476432311118, + "learning_rate": 1.2899999999999998e-05, + "loss": 3.4579, + "step": 86 + }, + { + "epoch": 0.05616526791478373, + "grad_norm": 4.376631683920921, + "learning_rate": 1.3049999999999999e-05, + "loss": 3.5177, + "step": 87 + }, + { + "epoch": 0.056810845706907684, + "grad_norm": 4.385876575418767, + "learning_rate": 1.3199999999999997e-05, + "loss": 3.5406, + "step": 88 + }, + { + "epoch": 0.057456423499031635, + "grad_norm": 5.935518959048243, + "learning_rate": 1.3349999999999998e-05, + "loss": 3.6522, + "step": 89 + }, + { + "epoch": 0.058102001291155586, + "grad_norm": 4.550408813465349, + "learning_rate": 1.3499999999999998e-05, + "loss": 3.5655, + "step": 90 + }, + { + "epoch": 0.05874757908327954, + "grad_norm": 4.042678124362284, + "learning_rate": 1.3649999999999998e-05, + "loss": 3.4621, + "step": 91 + }, + { + "epoch": 0.05939315687540349, + "grad_norm": 5.161726408516209, + "learning_rate": 1.3799999999999998e-05, + "loss": 2.9617, + "step": 92 + }, + { + "epoch": 0.06003873466752744, + "grad_norm": 4.165810774944912, + "learning_rate": 1.3949999999999999e-05, + "loss": 3.3416, + "step": 93 + }, + { + "epoch": 0.06068431245965139, + "grad_norm": 4.206128095698114, + "learning_rate": 1.4099999999999999e-05, + "loss": 3.5464, + "step": 94 + }, + { + "epoch": 0.06132989025177534, + "grad_norm": 5.563313985569593, + "learning_rate": 1.4249999999999999e-05, + "loss": 3.0721, + "step": 95 + }, + { + "epoch": 0.06197546804389929, + "grad_norm": 4.04645008488737, + "learning_rate": 1.44e-05, + "loss": 3.2851, + "step": 96 + }, + { + "epoch": 0.06262104583602324, + "grad_norm": 5.697235514520104, + 
"learning_rate": 1.455e-05, + "loss": 3.6506, + "step": 97 + }, + { + "epoch": 0.0632666236281472, + "grad_norm": 5.073111927074508, + "learning_rate": 1.47e-05, + "loss": 3.5091, + "step": 98 + }, + { + "epoch": 0.06391220142027114, + "grad_norm": 4.353852963486646, + "learning_rate": 1.485e-05, + "loss": 3.3843, + "step": 99 + }, + { + "epoch": 0.0645577792123951, + "grad_norm": 4.1018142450738715, + "learning_rate": 1.4999999999999999e-05, + "loss": 3.2798, + "step": 100 + }, + { + "epoch": 0.06520335700451904, + "grad_norm": 4.947445671737025, + "learning_rate": 1.5149999999999999e-05, + "loss": 3.4329, + "step": 101 + }, + { + "epoch": 0.065848934796643, + "grad_norm": 4.418460549482913, + "learning_rate": 1.53e-05, + "loss": 3.4906, + "step": 102 + }, + { + "epoch": 0.06649451258876694, + "grad_norm": 5.197297552486868, + "learning_rate": 1.545e-05, + "loss": 3.4323, + "step": 103 + }, + { + "epoch": 0.0671400903808909, + "grad_norm": 5.613759126938728, + "learning_rate": 1.5599999999999996e-05, + "loss": 3.2835, + "step": 104 + }, + { + "epoch": 0.06778566817301485, + "grad_norm": 4.204369571764076, + "learning_rate": 1.5749999999999997e-05, + "loss": 3.5471, + "step": 105 + }, + { + "epoch": 0.0684312459651388, + "grad_norm": 3.8491869688917406, + "learning_rate": 1.5899999999999997e-05, + "loss": 3.4019, + "step": 106 + }, + { + "epoch": 0.06907682375726275, + "grad_norm": 4.549219398056657, + "learning_rate": 1.6049999999999997e-05, + "loss": 3.4642, + "step": 107 + }, + { + "epoch": 0.0697224015493867, + "grad_norm": 4.674856801627368, + "learning_rate": 1.6199999999999997e-05, + "loss": 3.5001, + "step": 108 + }, + { + "epoch": 0.07036797934151065, + "grad_norm": 4.045556138194685, + "learning_rate": 1.6349999999999998e-05, + "loss": 3.507, + "step": 109 + }, + { + "epoch": 0.07101355713363461, + "grad_norm": 3.830705447411254, + "learning_rate": 1.6499999999999998e-05, + "loss": 3.4531, + "step": 110 + }, + { + "epoch": 0.07165913492575855, + "grad_norm": 4.0735827568132565, + "learning_rate": 1.6649999999999998e-05, + "loss": 3.0889, + "step": 111 + }, + { + "epoch": 0.07230471271788251, + "grad_norm": 4.181422031167849, + "learning_rate": 1.68e-05, + "loss": 3.2478, + "step": 112 + }, + { + "epoch": 0.07295029051000645, + "grad_norm": 3.7224291843628796, + "learning_rate": 1.695e-05, + "loss": 3.2591, + "step": 113 + }, + { + "epoch": 0.07359586830213041, + "grad_norm": 4.5685757646777185, + "learning_rate": 1.71e-05, + "loss": 3.4809, + "step": 114 + }, + { + "epoch": 0.07424144609425436, + "grad_norm": 4.036152435793681, + "learning_rate": 1.725e-05, + "loss": 3.2717, + "step": 115 + }, + { + "epoch": 0.07488702388637831, + "grad_norm": 3.847402342777321, + "learning_rate": 1.74e-05, + "loss": 3.4891, + "step": 116 + }, + { + "epoch": 0.07553260167850226, + "grad_norm": 3.5558587865715627, + "learning_rate": 1.755e-05, + "loss": 3.1742, + "step": 117 + }, + { + "epoch": 0.07617817947062622, + "grad_norm": 4.312592712599566, + "learning_rate": 1.7699999999999997e-05, + "loss": 3.2026, + "step": 118 + }, + { + "epoch": 0.07682375726275016, + "grad_norm": 4.050918196230232, + "learning_rate": 1.7849999999999997e-05, + "loss": 3.1958, + "step": 119 + }, + { + "epoch": 0.0774693350548741, + "grad_norm": 3.570747117602931, + "learning_rate": 1.7999999999999997e-05, + "loss": 3.2911, + "step": 120 + }, + { + "epoch": 0.07811491284699806, + "grad_norm": 4.590385085985248, + "learning_rate": 1.8149999999999997e-05, + "loss": 3.2546, + "step": 121 + }, + { + "epoch": 
0.07876049063912201, + "grad_norm": 4.301843491893148, + "learning_rate": 1.8299999999999998e-05, + "loss": 3.2984, + "step": 122 + }, + { + "epoch": 0.07940606843124597, + "grad_norm": 4.973422097098932, + "learning_rate": 1.8449999999999998e-05, + "loss": 3.2085, + "step": 123 + }, + { + "epoch": 0.08005164622336991, + "grad_norm": 4.434287887258886, + "learning_rate": 1.8599999999999998e-05, + "loss": 3.3524, + "step": 124 + }, + { + "epoch": 0.08069722401549387, + "grad_norm": 4.795058129819485, + "learning_rate": 1.875e-05, + "loss": 2.8464, + "step": 125 + }, + { + "epoch": 0.08134280180761781, + "grad_norm": 4.706758556130797, + "learning_rate": 1.89e-05, + "loss": 2.969, + "step": 126 + }, + { + "epoch": 0.08198837959974177, + "grad_norm": 5.214500165966482, + "learning_rate": 1.905e-05, + "loss": 3.2276, + "step": 127 + }, + { + "epoch": 0.08263395739186571, + "grad_norm": 4.285253858085143, + "learning_rate": 1.92e-05, + "loss": 3.4163, + "step": 128 + }, + { + "epoch": 0.08327953518398967, + "grad_norm": 4.890420279126149, + "learning_rate": 1.935e-05, + "loss": 3.3426, + "step": 129 + }, + { + "epoch": 0.08392511297611362, + "grad_norm": 4.328151450797413, + "learning_rate": 1.95e-05, + "loss": 3.3592, + "step": 130 + }, + { + "epoch": 0.08457069076823757, + "grad_norm": 3.890472057996496, + "learning_rate": 1.965e-05, + "loss": 3.252, + "step": 131 + }, + { + "epoch": 0.08521626856036152, + "grad_norm": 4.285513080915741, + "learning_rate": 1.98e-05, + "loss": 3.3153, + "step": 132 + }, + { + "epoch": 0.08586184635248548, + "grad_norm": 4.089038464520009, + "learning_rate": 1.995e-05, + "loss": 2.8851, + "step": 133 + }, + { + "epoch": 0.08650742414460942, + "grad_norm": 4.113525485615411, + "learning_rate": 2.01e-05, + "loss": 3.2159, + "step": 134 + }, + { + "epoch": 0.08715300193673338, + "grad_norm": 4.198143850693633, + "learning_rate": 2.025e-05, + "loss": 3.3462, + "step": 135 + }, + { + "epoch": 0.08779857972885732, + "grad_norm": 4.960679954102358, + "learning_rate": 2.04e-05, + "loss": 3.3496, + "step": 136 + }, + { + "epoch": 0.08844415752098128, + "grad_norm": 4.378442988490239, + "learning_rate": 2.055e-05, + "loss": 3.3123, + "step": 137 + }, + { + "epoch": 0.08908973531310523, + "grad_norm": 4.039767133185382, + "learning_rate": 2.07e-05, + "loss": 3.3631, + "step": 138 + }, + { + "epoch": 0.08973531310522918, + "grad_norm": 4.03044229039286, + "learning_rate": 2.085e-05, + "loss": 3.3132, + "step": 139 + }, + { + "epoch": 0.09038089089735313, + "grad_norm": 3.8313545454396007, + "learning_rate": 2.1e-05, + "loss": 3.205, + "step": 140 + }, + { + "epoch": 0.09102646868947709, + "grad_norm": 3.9525643603242773, + "learning_rate": 2.1149999999999996e-05, + "loss": 3.0605, + "step": 141 + }, + { + "epoch": 0.09167204648160103, + "grad_norm": 3.5171100837972213, + "learning_rate": 2.1299999999999996e-05, + "loss": 3.3125, + "step": 142 + }, + { + "epoch": 0.09231762427372499, + "grad_norm": 3.69297807725956, + "learning_rate": 2.1449999999999996e-05, + "loss": 3.0452, + "step": 143 + }, + { + "epoch": 0.09296320206584893, + "grad_norm": 4.075982147881248, + "learning_rate": 2.1599999999999996e-05, + "loss": 3.2222, + "step": 144 + }, + { + "epoch": 0.09360877985797289, + "grad_norm": 3.908900955068572, + "learning_rate": 2.1749999999999997e-05, + "loss": 3.1528, + "step": 145 + }, + { + "epoch": 0.09425435765009683, + "grad_norm": 4.504630728729918, + "learning_rate": 2.1899999999999997e-05, + "loss": 2.7325, + "step": 146 + }, + { + "epoch": 0.09489993544222079, + 
"grad_norm": 3.90789610629542, + "learning_rate": 2.2049999999999997e-05, + "loss": 3.3873, + "step": 147 + }, + { + "epoch": 0.09554551323434474, + "grad_norm": 3.835424839416688, + "learning_rate": 2.2199999999999998e-05, + "loss": 3.0175, + "step": 148 + }, + { + "epoch": 0.0961910910264687, + "grad_norm": 3.7401421872745018, + "learning_rate": 2.2349999999999998e-05, + "loss": 3.3319, + "step": 149 + }, + { + "epoch": 0.09683666881859264, + "grad_norm": 3.7123159822897396, + "learning_rate": 2.2499999999999998e-05, + "loss": 3.0565, + "step": 150 + }, + { + "epoch": 0.0974822466107166, + "grad_norm": 4.0445022758352405, + "learning_rate": 2.2649999999999998e-05, + "loss": 3.3207, + "step": 151 + }, + { + "epoch": 0.09812782440284054, + "grad_norm": 3.834541189539634, + "learning_rate": 2.28e-05, + "loss": 3.2698, + "step": 152 + }, + { + "epoch": 0.0987734021949645, + "grad_norm": 3.9085933551681515, + "learning_rate": 2.295e-05, + "loss": 3.0905, + "step": 153 + }, + { + "epoch": 0.09941897998708844, + "grad_norm": 3.5748526584869356, + "learning_rate": 2.31e-05, + "loss": 3.0646, + "step": 154 + }, + { + "epoch": 0.1000645577792124, + "grad_norm": 3.814078868387233, + "learning_rate": 2.325e-05, + "loss": 2.8931, + "step": 155 + }, + { + "epoch": 0.10071013557133635, + "grad_norm": 3.6496868243027967, + "learning_rate": 2.34e-05, + "loss": 3.2231, + "step": 156 + }, + { + "epoch": 0.1013557133634603, + "grad_norm": 3.623116086795427, + "learning_rate": 2.3549999999999996e-05, + "loss": 3.1965, + "step": 157 + }, + { + "epoch": 0.10200129115558425, + "grad_norm": 5.99224296628058, + "learning_rate": 2.3699999999999997e-05, + "loss": 3.2939, + "step": 158 + }, + { + "epoch": 0.10264686894770819, + "grad_norm": 5.153580936707791, + "learning_rate": 2.3849999999999997e-05, + "loss": 3.3765, + "step": 159 + }, + { + "epoch": 0.10329244673983215, + "grad_norm": 3.6328261276186162, + "learning_rate": 2.3999999999999997e-05, + "loss": 3.2678, + "step": 160 + }, + { + "epoch": 0.1039380245319561, + "grad_norm": 3.6631229276744652, + "learning_rate": 2.4149999999999997e-05, + "loss": 3.297, + "step": 161 + }, + { + "epoch": 0.10458360232408005, + "grad_norm": 4.493877883912098, + "learning_rate": 2.4299999999999998e-05, + "loss": 3.3556, + "step": 162 + }, + { + "epoch": 0.105229180116204, + "grad_norm": 4.101555362793111, + "learning_rate": 2.4449999999999998e-05, + "loss": 3.3698, + "step": 163 + }, + { + "epoch": 0.10587475790832795, + "grad_norm": 3.374277187393748, + "learning_rate": 2.4599999999999998e-05, + "loss": 3.1211, + "step": 164 + }, + { + "epoch": 0.1065203357004519, + "grad_norm": 4.102563749028788, + "learning_rate": 2.475e-05, + "loss": 3.1647, + "step": 165 + }, + { + "epoch": 0.10716591349257586, + "grad_norm": 3.951929708972765, + "learning_rate": 2.49e-05, + "loss": 3.0619, + "step": 166 + }, + { + "epoch": 0.1078114912846998, + "grad_norm": 3.39689467679511, + "learning_rate": 2.505e-05, + "loss": 3.1192, + "step": 167 + }, + { + "epoch": 0.10845706907682376, + "grad_norm": 3.754827132611778, + "learning_rate": 2.52e-05, + "loss": 2.7923, + "step": 168 + }, + { + "epoch": 0.1091026468689477, + "grad_norm": 4.090505580424874, + "learning_rate": 2.535e-05, + "loss": 3.0916, + "step": 169 + }, + { + "epoch": 0.10974822466107166, + "grad_norm": 4.915009272186659, + "learning_rate": 2.55e-05, + "loss": 3.3302, + "step": 170 + }, + { + "epoch": 0.1103938024531956, + "grad_norm": 3.4246300234438936, + "learning_rate": 2.565e-05, + "loss": 2.7306, + "step": 171 + }, + { + 
"epoch": 0.11103938024531956, + "grad_norm": 4.12659675033384, + "learning_rate": 2.5799999999999997e-05, + "loss": 3.4256, + "step": 172 + }, + { + "epoch": 0.11168495803744351, + "grad_norm": 4.063004878688796, + "learning_rate": 2.5949999999999997e-05, + "loss": 3.0721, + "step": 173 + }, + { + "epoch": 0.11233053582956747, + "grad_norm": 4.02360771663168, + "learning_rate": 2.6099999999999997e-05, + "loss": 3.3463, + "step": 174 + }, + { + "epoch": 0.11297611362169141, + "grad_norm": 3.648438994610923, + "learning_rate": 2.6249999999999998e-05, + "loss": 2.8788, + "step": 175 + }, + { + "epoch": 0.11362169141381537, + "grad_norm": 3.8431504676495085, + "learning_rate": 2.6399999999999995e-05, + "loss": 2.9901, + "step": 176 + }, + { + "epoch": 0.11426726920593931, + "grad_norm": 3.8153220277354607, + "learning_rate": 2.6549999999999995e-05, + "loss": 2.6959, + "step": 177 + }, + { + "epoch": 0.11491284699806327, + "grad_norm": 3.5807796550779107, + "learning_rate": 2.6699999999999995e-05, + "loss": 3.03, + "step": 178 + }, + { + "epoch": 0.11555842479018721, + "grad_norm": 3.697594147449689, + "learning_rate": 2.6849999999999995e-05, + "loss": 3.0334, + "step": 179 + }, + { + "epoch": 0.11620400258231117, + "grad_norm": 3.7557641534108615, + "learning_rate": 2.6999999999999996e-05, + "loss": 3.0586, + "step": 180 + }, + { + "epoch": 0.11684958037443512, + "grad_norm": 3.7464046772853923, + "learning_rate": 2.7149999999999996e-05, + "loss": 3.0256, + "step": 181 + }, + { + "epoch": 0.11749515816655907, + "grad_norm": 3.9253083441205545, + "learning_rate": 2.7299999999999996e-05, + "loss": 3.1572, + "step": 182 + }, + { + "epoch": 0.11814073595868302, + "grad_norm": 3.533404842143466, + "learning_rate": 2.7449999999999996e-05, + "loss": 2.9583, + "step": 183 + }, + { + "epoch": 0.11878631375080698, + "grad_norm": 3.9971641975193273, + "learning_rate": 2.7599999999999997e-05, + "loss": 3.0949, + "step": 184 + }, + { + "epoch": 0.11943189154293092, + "grad_norm": 3.4062346654311386, + "learning_rate": 2.7749999999999997e-05, + "loss": 2.648, + "step": 185 + }, + { + "epoch": 0.12007746933505488, + "grad_norm": 3.657564891219952, + "learning_rate": 2.7899999999999997e-05, + "loss": 3.1468, + "step": 186 + }, + { + "epoch": 0.12072304712717882, + "grad_norm": 4.330420767941609, + "learning_rate": 2.8049999999999997e-05, + "loss": 2.623, + "step": 187 + }, + { + "epoch": 0.12136862491930278, + "grad_norm": 4.2847510124647, + "learning_rate": 2.8199999999999998e-05, + "loss": 3.2372, + "step": 188 + }, + { + "epoch": 0.12201420271142673, + "grad_norm": 4.235981530754495, + "learning_rate": 2.8349999999999998e-05, + "loss": 3.2102, + "step": 189 + }, + { + "epoch": 0.12265978050355068, + "grad_norm": 3.6130934005178816, + "learning_rate": 2.8499999999999998e-05, + "loss": 3.003, + "step": 190 + }, + { + "epoch": 0.12330535829567463, + "grad_norm": 3.96922568416227, + "learning_rate": 2.865e-05, + "loss": 3.1686, + "step": 191 + }, + { + "epoch": 0.12395093608779859, + "grad_norm": 3.392847513913656, + "learning_rate": 2.88e-05, + "loss": 2.9814, + "step": 192 + }, + { + "epoch": 0.12459651387992253, + "grad_norm": 3.685147356164431, + "learning_rate": 2.895e-05, + "loss": 3.2369, + "step": 193 + }, + { + "epoch": 0.12524209167204647, + "grad_norm": 3.698534825192292, + "learning_rate": 2.91e-05, + "loss": 3.0896, + "step": 194 + }, + { + "epoch": 0.12588766946417043, + "grad_norm": 4.068013808677721, + "learning_rate": 2.925e-05, + "loss": 3.0374, + "step": 195 + }, + { + "epoch": 
0.1265332472562944, + "grad_norm": 3.5473136871496265, + "learning_rate": 2.94e-05, + "loss": 3.0514, + "step": 196 + }, + { + "epoch": 0.12717882504841835, + "grad_norm": 3.5467867356760743, + "learning_rate": 2.955e-05, + "loss": 3.1952, + "step": 197 + }, + { + "epoch": 0.12782440284054228, + "grad_norm": 3.4624267575386343, + "learning_rate": 2.97e-05, + "loss": 2.8294, + "step": 198 + }, + { + "epoch": 0.12846998063266624, + "grad_norm": 3.5122274465445185, + "learning_rate": 2.985e-05, + "loss": 3.1512, + "step": 199 + }, + { + "epoch": 0.1291155584247902, + "grad_norm": 3.497615047701147, + "learning_rate": 2.9999999999999997e-05, + "loss": 2.8089, + "step": 200 + }, + { + "epoch": 0.12976113621691412, + "grad_norm": 3.4479806215894886, + "learning_rate": 3.0149999999999998e-05, + "loss": 3.0237, + "step": 201 + }, + { + "epoch": 0.13040671400903808, + "grad_norm": 3.5567479987338517, + "learning_rate": 3.0299999999999998e-05, + "loss": 3.0895, + "step": 202 + }, + { + "epoch": 0.13105229180116204, + "grad_norm": 3.4522182832764345, + "learning_rate": 3.0449999999999998e-05, + "loss": 3.0179, + "step": 203 + }, + { + "epoch": 0.131697869593286, + "grad_norm": 4.0167364437663675, + "learning_rate": 3.06e-05, + "loss": 2.857, + "step": 204 + }, + { + "epoch": 0.13234344738540993, + "grad_norm": 3.859158067944711, + "learning_rate": 3.0749999999999995e-05, + "loss": 3.1872, + "step": 205 + }, + { + "epoch": 0.1329890251775339, + "grad_norm": 5.235168555965991, + "learning_rate": 3.09e-05, + "loss": 2.8615, + "step": 206 + }, + { + "epoch": 0.13363460296965785, + "grad_norm": 3.808889871214767, + "learning_rate": 3.1049999999999996e-05, + "loss": 3.257, + "step": 207 + }, + { + "epoch": 0.1342801807617818, + "grad_norm": 3.2564064646926756, + "learning_rate": 3.119999999999999e-05, + "loss": 2.8662, + "step": 208 + }, + { + "epoch": 0.13492575855390573, + "grad_norm": 3.8984412296458206, + "learning_rate": 3.1349999999999996e-05, + "loss": 3.0862, + "step": 209 + }, + { + "epoch": 0.1355713363460297, + "grad_norm": 3.620667704748298, + "learning_rate": 3.149999999999999e-05, + "loss": 3.1033, + "step": 210 + }, + { + "epoch": 0.13621691413815365, + "grad_norm": 3.122354966386682, + "learning_rate": 3.165e-05, + "loss": 2.9963, + "step": 211 + }, + { + "epoch": 0.1368624919302776, + "grad_norm": 3.2518285994958376, + "learning_rate": 3.1799999999999994e-05, + "loss": 2.8745, + "step": 212 + }, + { + "epoch": 0.13750806972240154, + "grad_norm": 3.4758819958094893, + "learning_rate": 3.195e-05, + "loss": 3.208, + "step": 213 + }, + { + "epoch": 0.1381536475145255, + "grad_norm": 3.8132765152415953, + "learning_rate": 3.2099999999999994e-05, + "loss": 2.712, + "step": 214 + }, + { + "epoch": 0.13879922530664945, + "grad_norm": 3.4860604915046154, + "learning_rate": 3.225e-05, + "loss": 3.2132, + "step": 215 + }, + { + "epoch": 0.1394448030987734, + "grad_norm": 3.068252738347945, + "learning_rate": 3.2399999999999995e-05, + "loss": 2.9618, + "step": 216 + }, + { + "epoch": 0.14009038089089734, + "grad_norm": 3.250698702060672, + "learning_rate": 3.255e-05, + "loss": 2.701, + "step": 217 + }, + { + "epoch": 0.1407359586830213, + "grad_norm": 3.6603367617253944, + "learning_rate": 3.2699999999999995e-05, + "loss": 3.0122, + "step": 218 + }, + { + "epoch": 0.14138153647514526, + "grad_norm": 3.362389281918618, + "learning_rate": 3.285e-05, + "loss": 2.5798, + "step": 219 + }, + { + "epoch": 0.14202711426726922, + "grad_norm": 3.7438059448798584, + "learning_rate": 3.2999999999999996e-05, + 
"loss": 3.1051, + "step": 220 + }, + { + "epoch": 0.14267269205939315, + "grad_norm": 2.8391652316986593, + "learning_rate": 3.315e-05, + "loss": 2.9039, + "step": 221 + }, + { + "epoch": 0.1433182698515171, + "grad_norm": 3.2173955457127104, + "learning_rate": 3.3299999999999996e-05, + "loss": 3.2336, + "step": 222 + }, + { + "epoch": 0.14396384764364106, + "grad_norm": 3.0182274250310672, + "learning_rate": 3.345e-05, + "loss": 2.7669, + "step": 223 + }, + { + "epoch": 0.14460942543576502, + "grad_norm": 2.981206871652357, + "learning_rate": 3.36e-05, + "loss": 2.8258, + "step": 224 + }, + { + "epoch": 0.14525500322788895, + "grad_norm": 3.8972605271826715, + "learning_rate": 3.375e-05, + "loss": 2.4968, + "step": 225 + }, + { + "epoch": 0.1459005810200129, + "grad_norm": 3.1390742393898647, + "learning_rate": 3.39e-05, + "loss": 2.989, + "step": 226 + }, + { + "epoch": 0.14654615881213687, + "grad_norm": 4.10779176506883, + "learning_rate": 3.405e-05, + "loss": 2.9488, + "step": 227 + }, + { + "epoch": 0.14719173660426083, + "grad_norm": 3.4253610993620587, + "learning_rate": 3.42e-05, + "loss": 2.9351, + "step": 228 + }, + { + "epoch": 0.14783731439638476, + "grad_norm": 3.429913359606455, + "learning_rate": 3.435e-05, + "loss": 3.0313, + "step": 229 + }, + { + "epoch": 0.1484828921885087, + "grad_norm": 2.976938762487162, + "learning_rate": 3.45e-05, + "loss": 2.5525, + "step": 230 + }, + { + "epoch": 0.14912846998063267, + "grad_norm": 4.161891931828376, + "learning_rate": 3.465e-05, + "loss": 2.8889, + "step": 231 + }, + { + "epoch": 0.14977404777275663, + "grad_norm": 3.0705104435234185, + "learning_rate": 3.48e-05, + "loss": 2.9486, + "step": 232 + }, + { + "epoch": 0.15041962556488056, + "grad_norm": 3.1469150424540824, + "learning_rate": 3.4949999999999996e-05, + "loss": 2.896, + "step": 233 + }, + { + "epoch": 0.15106520335700452, + "grad_norm": 3.021821306124831, + "learning_rate": 3.51e-05, + "loss": 3.0939, + "step": 234 + }, + { + "epoch": 0.15171078114912848, + "grad_norm": 4.43901861986546, + "learning_rate": 3.5249999999999996e-05, + "loss": 2.9554, + "step": 235 + }, + { + "epoch": 0.15235635894125243, + "grad_norm": 3.452875634850277, + "learning_rate": 3.539999999999999e-05, + "loss": 3.1123, + "step": 236 + }, + { + "epoch": 0.15300193673337636, + "grad_norm": 3.01376610718146, + "learning_rate": 3.555e-05, + "loss": 2.8864, + "step": 237 + }, + { + "epoch": 0.15364751452550032, + "grad_norm": 3.016799819169298, + "learning_rate": 3.5699999999999994e-05, + "loss": 2.9698, + "step": 238 + }, + { + "epoch": 0.15429309231762428, + "grad_norm": 2.806046525093469, + "learning_rate": 3.585e-05, + "loss": 2.7444, + "step": 239 + }, + { + "epoch": 0.1549386701097482, + "grad_norm": 3.3393477764177804, + "learning_rate": 3.5999999999999994e-05, + "loss": 3.0705, + "step": 240 + }, + { + "epoch": 0.15558424790187217, + "grad_norm": 3.218147462997492, + "learning_rate": 3.615e-05, + "loss": 2.6335, + "step": 241 + }, + { + "epoch": 0.15622982569399613, + "grad_norm": 3.009257960736446, + "learning_rate": 3.6299999999999995e-05, + "loss": 2.9064, + "step": 242 + }, + { + "epoch": 0.15687540348612009, + "grad_norm": 3.514905413034632, + "learning_rate": 3.645e-05, + "loss": 2.7147, + "step": 243 + }, + { + "epoch": 0.15752098127824402, + "grad_norm": 3.25448033722383, + "learning_rate": 3.6599999999999995e-05, + "loss": 2.963, + "step": 244 + }, + { + "epoch": 0.15816655907036797, + "grad_norm": 3.164609645795226, + "learning_rate": 3.675e-05, + "loss": 3.0222, + "step": 245 + }, 
+ { + "epoch": 0.15881213686249193, + "grad_norm": 3.013105905420338, + "learning_rate": 3.6899999999999996e-05, + "loss": 2.919, + "step": 246 + }, + { + "epoch": 0.1594577146546159, + "grad_norm": 3.103237594533812, + "learning_rate": 3.705e-05, + "loss": 3.2344, + "step": 247 + }, + { + "epoch": 0.16010329244673982, + "grad_norm": 2.776310679001456, + "learning_rate": 3.7199999999999996e-05, + "loss": 2.8184, + "step": 248 + }, + { + "epoch": 0.16074887023886378, + "grad_norm": 2.6753671657505955, + "learning_rate": 3.735e-05, + "loss": 3.0389, + "step": 249 + }, + { + "epoch": 0.16139444803098774, + "grad_norm": 3.2789986931491044, + "learning_rate": 3.75e-05, + "loss": 2.9859, + "step": 250 + }, + { + "epoch": 0.1620400258231117, + "grad_norm": 2.852073497654096, + "learning_rate": 3.7649999999999994e-05, + "loss": 3.1697, + "step": 251 + }, + { + "epoch": 0.16268560361523562, + "grad_norm": 3.0847100889397856, + "learning_rate": 3.78e-05, + "loss": 3.0213, + "step": 252 + }, + { + "epoch": 0.16333118140735958, + "grad_norm": 3.522215154947736, + "learning_rate": 3.7949999999999994e-05, + "loss": 2.9544, + "step": 253 + }, + { + "epoch": 0.16397675919948354, + "grad_norm": 3.4976873315954338, + "learning_rate": 3.81e-05, + "loss": 2.9796, + "step": 254 + }, + { + "epoch": 0.1646223369916075, + "grad_norm": 2.647905518179324, + "learning_rate": 3.8249999999999995e-05, + "loss": 2.5837, + "step": 255 + }, + { + "epoch": 0.16526791478373143, + "grad_norm": 3.0482270972406984, + "learning_rate": 3.84e-05, + "loss": 3.0477, + "step": 256 + }, + { + "epoch": 0.1659134925758554, + "grad_norm": 3.139289495577516, + "learning_rate": 3.8549999999999995e-05, + "loss": 3.0454, + "step": 257 + }, + { + "epoch": 0.16655907036797934, + "grad_norm": 3.385514752607173, + "learning_rate": 3.87e-05, + "loss": 2.9293, + "step": 258 + }, + { + "epoch": 0.1672046481601033, + "grad_norm": 3.39637684909753, + "learning_rate": 3.8849999999999996e-05, + "loss": 3.2787, + "step": 259 + }, + { + "epoch": 0.16785022595222723, + "grad_norm": 2.913604726098393, + "learning_rate": 3.9e-05, + "loss": 2.8828, + "step": 260 + }, + { + "epoch": 0.1684958037443512, + "grad_norm": 3.164050385090309, + "learning_rate": 3.9149999999999996e-05, + "loss": 2.9463, + "step": 261 + }, + { + "epoch": 0.16914138153647515, + "grad_norm": 3.1418180025626143, + "learning_rate": 3.93e-05, + "loss": 3.0839, + "step": 262 + }, + { + "epoch": 0.1697869593285991, + "grad_norm": 3.1149175576055965, + "learning_rate": 3.945e-05, + "loss": 2.8666, + "step": 263 + }, + { + "epoch": 0.17043253712072304, + "grad_norm": 3.6492310436998863, + "learning_rate": 3.96e-05, + "loss": 3.1609, + "step": 264 + }, + { + "epoch": 0.171078114912847, + "grad_norm": 3.0503707195250684, + "learning_rate": 3.975e-05, + "loss": 2.9498, + "step": 265 + }, + { + "epoch": 0.17172369270497095, + "grad_norm": 2.5923201385487475, + "learning_rate": 3.99e-05, + "loss": 2.9408, + "step": 266 + }, + { + "epoch": 0.1723692704970949, + "grad_norm": 2.9287165103526, + "learning_rate": 4.005e-05, + "loss": 3.0056, + "step": 267 + }, + { + "epoch": 0.17301484828921884, + "grad_norm": 2.9438916584904136, + "learning_rate": 4.02e-05, + "loss": 2.9923, + "step": 268 + }, + { + "epoch": 0.1736604260813428, + "grad_norm": 2.7511136959406985, + "learning_rate": 4.035e-05, + "loss": 2.9725, + "step": 269 + }, + { + "epoch": 0.17430600387346676, + "grad_norm": 3.0919338410887245, + "learning_rate": 4.05e-05, + "loss": 2.7242, + "step": 270 + }, + { + "epoch": 0.17495158166559072, + 
"grad_norm": 3.3730782950584715, + "learning_rate": 4.065e-05, + "loss": 3.1962, + "step": 271 + }, + { + "epoch": 0.17559715945771465, + "grad_norm": 3.298364430545772, + "learning_rate": 4.08e-05, + "loss": 2.9813, + "step": 272 + }, + { + "epoch": 0.1762427372498386, + "grad_norm": 2.999622281519893, + "learning_rate": 4.095e-05, + "loss": 2.948, + "step": 273 + }, + { + "epoch": 0.17688831504196256, + "grad_norm": 3.1909897236261973, + "learning_rate": 4.11e-05, + "loss": 3.1093, + "step": 274 + }, + { + "epoch": 0.17753389283408652, + "grad_norm": 2.8766189510838167, + "learning_rate": 4.125e-05, + "loss": 3.021, + "step": 275 + }, + { + "epoch": 0.17817947062621045, + "grad_norm": 2.515159469432622, + "learning_rate": 4.14e-05, + "loss": 2.9127, + "step": 276 + }, + { + "epoch": 0.1788250484183344, + "grad_norm": 2.5851786708688738, + "learning_rate": 4.155e-05, + "loss": 2.8626, + "step": 277 + }, + { + "epoch": 0.17947062621045837, + "grad_norm": 2.682192005478775, + "learning_rate": 4.17e-05, + "loss": 2.9063, + "step": 278 + }, + { + "epoch": 0.1801162040025823, + "grad_norm": 3.105284993283052, + "learning_rate": 4.185e-05, + "loss": 2.9357, + "step": 279 + }, + { + "epoch": 0.18076178179470626, + "grad_norm": 2.752630108015661, + "learning_rate": 4.2e-05, + "loss": 2.8511, + "step": 280 + }, + { + "epoch": 0.1814073595868302, + "grad_norm": 2.9113091729775644, + "learning_rate": 4.215e-05, + "loss": 2.9915, + "step": 281 + }, + { + "epoch": 0.18205293737895417, + "grad_norm": 2.953713214686246, + "learning_rate": 4.229999999999999e-05, + "loss": 2.9262, + "step": 282 + }, + { + "epoch": 0.1826985151710781, + "grad_norm": 2.7398175290162583, + "learning_rate": 4.2449999999999995e-05, + "loss": 2.8394, + "step": 283 + }, + { + "epoch": 0.18334409296320206, + "grad_norm": 3.0419923434593272, + "learning_rate": 4.259999999999999e-05, + "loss": 2.8887, + "step": 284 + }, + { + "epoch": 0.18398967075532602, + "grad_norm": 3.251112851540754, + "learning_rate": 4.2749999999999996e-05, + "loss": 2.475, + "step": 285 + }, + { + "epoch": 0.18463524854744998, + "grad_norm": 3.218389082803298, + "learning_rate": 4.289999999999999e-05, + "loss": 3.1067, + "step": 286 + }, + { + "epoch": 0.1852808263395739, + "grad_norm": 2.9473567122952593, + "learning_rate": 4.3049999999999996e-05, + "loss": 2.7987, + "step": 287 + }, + { + "epoch": 0.18592640413169786, + "grad_norm": 3.129183100423129, + "learning_rate": 4.319999999999999e-05, + "loss": 2.8964, + "step": 288 + }, + { + "epoch": 0.18657198192382182, + "grad_norm": 2.8876723607782004, + "learning_rate": 4.334999999999999e-05, + "loss": 2.9006, + "step": 289 + }, + { + "epoch": 0.18721755971594578, + "grad_norm": 2.897010519265634, + "learning_rate": 4.3499999999999993e-05, + "loss": 3.13, + "step": 290 + }, + { + "epoch": 0.1878631375080697, + "grad_norm": 2.7012769962977172, + "learning_rate": 4.364999999999999e-05, + "loss": 3.0228, + "step": 291 + }, + { + "epoch": 0.18850871530019367, + "grad_norm": 3.0629840303580793, + "learning_rate": 4.3799999999999994e-05, + "loss": 3.1947, + "step": 292 + }, + { + "epoch": 0.18915429309231763, + "grad_norm": 2.824671309401795, + "learning_rate": 4.394999999999999e-05, + "loss": 3.1017, + "step": 293 + }, + { + "epoch": 0.18979987088444158, + "grad_norm": 2.6381358945222777, + "learning_rate": 4.4099999999999995e-05, + "loss": 2.85, + "step": 294 + }, + { + "epoch": 0.19044544867656552, + "grad_norm": 2.8156277681393087, + "learning_rate": 4.424999999999999e-05, + "loss": 3.1304, + "step": 295 + }, 
+ { + "epoch": 0.19109102646868947, + "grad_norm": 2.8289759681079896, + "learning_rate": 4.4399999999999995e-05, + "loss": 3.0727, + "step": 296 + }, + { + "epoch": 0.19173660426081343, + "grad_norm": 3.0336163253154855, + "learning_rate": 4.454999999999999e-05, + "loss": 2.9718, + "step": 297 + }, + { + "epoch": 0.1923821820529374, + "grad_norm": 2.691161168923196, + "learning_rate": 4.4699999999999996e-05, + "loss": 2.854, + "step": 298 + }, + { + "epoch": 0.19302775984506132, + "grad_norm": 3.0892871390850325, + "learning_rate": 4.484999999999999e-05, + "loss": 2.5979, + "step": 299 + }, + { + "epoch": 0.19367333763718528, + "grad_norm": 2.9041380002276687, + "learning_rate": 4.4999999999999996e-05, + "loss": 2.9099, + "step": 300 + }, + { + "epoch": 0.19431891542930924, + "grad_norm": 3.277229440738727, + "learning_rate": 4.514999999999999e-05, + "loss": 2.9183, + "step": 301 + }, + { + "epoch": 0.1949644932214332, + "grad_norm": 3.0474037263997245, + "learning_rate": 4.5299999999999997e-05, + "loss": 2.8201, + "step": 302 + }, + { + "epoch": 0.19561007101355712, + "grad_norm": 3.2564919426234282, + "learning_rate": 4.5449999999999993e-05, + "loss": 2.7942, + "step": 303 + }, + { + "epoch": 0.19625564880568108, + "grad_norm": 2.8241051237058543, + "learning_rate": 4.56e-05, + "loss": 2.8849, + "step": 304 + }, + { + "epoch": 0.19690122659780504, + "grad_norm": 2.7732915675902245, + "learning_rate": 4.5749999999999994e-05, + "loss": 2.7386, + "step": 305 + }, + { + "epoch": 0.197546804389929, + "grad_norm": 3.15148575905919, + "learning_rate": 4.59e-05, + "loss": 2.7451, + "step": 306 + }, + { + "epoch": 0.19819238218205293, + "grad_norm": 2.671550658578702, + "learning_rate": 4.6049999999999994e-05, + "loss": 2.8986, + "step": 307 + }, + { + "epoch": 0.1988379599741769, + "grad_norm": 3.4335519854167633, + "learning_rate": 4.62e-05, + "loss": 2.9387, + "step": 308 + }, + { + "epoch": 0.19948353776630084, + "grad_norm": 2.789859883409782, + "learning_rate": 4.6349999999999995e-05, + "loss": 2.8408, + "step": 309 + }, + { + "epoch": 0.2001291155584248, + "grad_norm": 2.7576731157055017, + "learning_rate": 4.65e-05, + "loss": 2.855, + "step": 310 + }, + { + "epoch": 0.20077469335054873, + "grad_norm": 2.701975513571651, + "learning_rate": 4.6649999999999996e-05, + "loss": 2.8772, + "step": 311 + }, + { + "epoch": 0.2014202711426727, + "grad_norm": 2.5585826956467894, + "learning_rate": 4.68e-05, + "loss": 2.9204, + "step": 312 + }, + { + "epoch": 0.20206584893479665, + "grad_norm": 3.2756745809014736, + "learning_rate": 4.6949999999999996e-05, + "loss": 2.9555, + "step": 313 + }, + { + "epoch": 0.2027114267269206, + "grad_norm": 2.652777264052532, + "learning_rate": 4.709999999999999e-05, + "loss": 2.3872, + "step": 314 + }, + { + "epoch": 0.20335700451904454, + "grad_norm": 2.7839273248293113, + "learning_rate": 4.7249999999999997e-05, + "loss": 2.7251, + "step": 315 + }, + { + "epoch": 0.2040025823111685, + "grad_norm": 3.035158797716936, + "learning_rate": 4.7399999999999993e-05, + "loss": 2.6426, + "step": 316 + }, + { + "epoch": 0.20464816010329245, + "grad_norm": 2.9617513283290737, + "learning_rate": 4.755e-05, + "loss": 2.8036, + "step": 317 + }, + { + "epoch": 0.20529373789541638, + "grad_norm": 2.7545310491608492, + "learning_rate": 4.7699999999999994e-05, + "loss": 2.8135, + "step": 318 + }, + { + "epoch": 0.20593931568754034, + "grad_norm": 2.3935199576126878, + "learning_rate": 4.785e-05, + "loss": 2.5748, + "step": 319 + }, + { + "epoch": 0.2065848934796643, + "grad_norm": 
2.690156366143486, + "learning_rate": 4.7999999999999994e-05, + "loss": 2.9242, + "step": 320 + }, + { + "epoch": 0.20723047127178826, + "grad_norm": 2.7731732546642833, + "learning_rate": 4.815e-05, + "loss": 3.0917, + "step": 321 + }, + { + "epoch": 0.2078760490639122, + "grad_norm": 2.6235894602836347, + "learning_rate": 4.8299999999999995e-05, + "loss": 2.4197, + "step": 322 + }, + { + "epoch": 0.20852162685603615, + "grad_norm": 2.727983980452325, + "learning_rate": 4.845e-05, + "loss": 2.738, + "step": 323 + }, + { + "epoch": 0.2091672046481601, + "grad_norm": 2.8287711361343892, + "learning_rate": 4.8599999999999995e-05, + "loss": 2.8092, + "step": 324 + }, + { + "epoch": 0.20981278244028406, + "grad_norm": 2.732216661272678, + "learning_rate": 4.875e-05, + "loss": 3.0275, + "step": 325 + }, + { + "epoch": 0.210458360232408, + "grad_norm": 2.792055695321844, + "learning_rate": 4.8899999999999996e-05, + "loss": 2.8292, + "step": 326 + }, + { + "epoch": 0.21110393802453195, + "grad_norm": 2.558630557659242, + "learning_rate": 4.905e-05, + "loss": 2.4693, + "step": 327 + }, + { + "epoch": 0.2117495158166559, + "grad_norm": 2.783235123503898, + "learning_rate": 4.9199999999999997e-05, + "loss": 2.5378, + "step": 328 + }, + { + "epoch": 0.21239509360877987, + "grad_norm": 2.569144137212567, + "learning_rate": 4.935e-05, + "loss": 2.8467, + "step": 329 + }, + { + "epoch": 0.2130406714009038, + "grad_norm": 2.5777311639127167, + "learning_rate": 4.95e-05, + "loss": 2.7558, + "step": 330 + }, + { + "epoch": 0.21368624919302776, + "grad_norm": 2.6011401342973253, + "learning_rate": 4.965e-05, + "loss": 3.0044, + "step": 331 + }, + { + "epoch": 0.2143318269851517, + "grad_norm": 2.751083460394851, + "learning_rate": 4.98e-05, + "loss": 2.8957, + "step": 332 + }, + { + "epoch": 0.21497740477727567, + "grad_norm": 2.5589523770269116, + "learning_rate": 4.995e-05, + "loss": 2.8364, + "step": 333 + }, + { + "epoch": 0.2156229825693996, + "grad_norm": 2.680157410600933, + "learning_rate": 5.01e-05, + "loss": 2.8139, + "step": 334 + }, + { + "epoch": 0.21626856036152356, + "grad_norm": 2.6695910423854503, + "learning_rate": 5.025e-05, + "loss": 2.9963, + "step": 335 + }, + { + "epoch": 0.21691413815364752, + "grad_norm": 2.5460406773677677, + "learning_rate": 5.04e-05, + "loss": 2.8288, + "step": 336 + }, + { + "epoch": 0.21755971594577148, + "grad_norm": 2.696615934384844, + "learning_rate": 5.055e-05, + "loss": 3.1038, + "step": 337 + }, + { + "epoch": 0.2182052937378954, + "grad_norm": 2.760178868009294, + "learning_rate": 5.07e-05, + "loss": 2.8739, + "step": 338 + }, + { + "epoch": 0.21885087153001936, + "grad_norm": 2.666230448413186, + "learning_rate": 5.0849999999999996e-05, + "loss": 3.1672, + "step": 339 + }, + { + "epoch": 0.21949644932214332, + "grad_norm": 2.4812377857454657, + "learning_rate": 5.1e-05, + "loss": 2.6713, + "step": 340 + }, + { + "epoch": 0.22014202711426728, + "grad_norm": 2.6170602071650007, + "learning_rate": 5.1149999999999996e-05, + "loss": 2.7903, + "step": 341 + }, + { + "epoch": 0.2207876049063912, + "grad_norm": 2.515104979340629, + "learning_rate": 5.13e-05, + "loss": 2.6427, + "step": 342 + }, + { + "epoch": 0.22143318269851517, + "grad_norm": 2.805059936642275, + "learning_rate": 5.145e-05, + "loss": 2.6678, + "step": 343 + }, + { + "epoch": 0.22207876049063913, + "grad_norm": 2.535283892233219, + "learning_rate": 5.1599999999999994e-05, + "loss": 2.8374, + "step": 344 + }, + { + "epoch": 0.22272433828276308, + "grad_norm": 2.6917638636996895, + 
"learning_rate": 5.174999999999999e-05, + "loss": 2.8725, + "step": 345 + }, + { + "epoch": 0.22336991607488701, + "grad_norm": 2.4185383886590475, + "learning_rate": 5.1899999999999994e-05, + "loss": 2.7811, + "step": 346 + }, + { + "epoch": 0.22401549386701097, + "grad_norm": 2.5372423164751963, + "learning_rate": 5.204999999999999e-05, + "loss": 2.8948, + "step": 347 + }, + { + "epoch": 0.22466107165913493, + "grad_norm": 2.565432321251173, + "learning_rate": 5.2199999999999995e-05, + "loss": 2.8072, + "step": 348 + }, + { + "epoch": 0.2253066494512589, + "grad_norm": 2.3222495529727594, + "learning_rate": 5.234999999999999e-05, + "loss": 2.7974, + "step": 349 + }, + { + "epoch": 0.22595222724338282, + "grad_norm": 2.4391230313494887, + "learning_rate": 5.2499999999999995e-05, + "loss": 3.0964, + "step": 350 + }, + { + "epoch": 0.22659780503550678, + "grad_norm": 2.215559215842581, + "learning_rate": 5.264999999999999e-05, + "loss": 2.5386, + "step": 351 + }, + { + "epoch": 0.22724338282763074, + "grad_norm": 2.383076267092991, + "learning_rate": 5.279999999999999e-05, + "loss": 2.9117, + "step": 352 + }, + { + "epoch": 0.2278889606197547, + "grad_norm": 2.642129706425525, + "learning_rate": 5.294999999999999e-05, + "loss": 3.0732, + "step": 353 + }, + { + "epoch": 0.22853453841187862, + "grad_norm": 2.5772528524893183, + "learning_rate": 5.309999999999999e-05, + "loss": 2.7114, + "step": 354 + }, + { + "epoch": 0.22918011620400258, + "grad_norm": 2.354194235547441, + "learning_rate": 5.324999999999999e-05, + "loss": 2.7723, + "step": 355 + }, + { + "epoch": 0.22982569399612654, + "grad_norm": 2.6536545768305793, + "learning_rate": 5.339999999999999e-05, + "loss": 2.8709, + "step": 356 + }, + { + "epoch": 0.2304712717882505, + "grad_norm": 2.4561550339508273, + "learning_rate": 5.3549999999999994e-05, + "loss": 2.7969, + "step": 357 + }, + { + "epoch": 0.23111684958037443, + "grad_norm": 2.543084220742945, + "learning_rate": 5.369999999999999e-05, + "loss": 2.9726, + "step": 358 + }, + { + "epoch": 0.2317624273724984, + "grad_norm": 2.6388958562854232, + "learning_rate": 5.3849999999999994e-05, + "loss": 2.7303, + "step": 359 + }, + { + "epoch": 0.23240800516462234, + "grad_norm": 2.42398855365677, + "learning_rate": 5.399999999999999e-05, + "loss": 3.0357, + "step": 360 + }, + { + "epoch": 0.23305358295674627, + "grad_norm": 2.5023830610546973, + "learning_rate": 5.4149999999999995e-05, + "loss": 2.9839, + "step": 361 + }, + { + "epoch": 0.23369916074887023, + "grad_norm": 2.4604737974522184, + "learning_rate": 5.429999999999999e-05, + "loss": 2.6981, + "step": 362 + }, + { + "epoch": 0.2343447385409942, + "grad_norm": 2.4398525453415285, + "learning_rate": 5.4449999999999995e-05, + "loss": 2.5484, + "step": 363 + }, + { + "epoch": 0.23499031633311815, + "grad_norm": 2.5646527131139574, + "learning_rate": 5.459999999999999e-05, + "loss": 3.1072, + "step": 364 + }, + { + "epoch": 0.23563589412524208, + "grad_norm": 5.041883657391581, + "learning_rate": 5.4749999999999996e-05, + "loss": 2.7443, + "step": 365 + }, + { + "epoch": 0.23628147191736604, + "grad_norm": 2.552905391129791, + "learning_rate": 5.489999999999999e-05, + "loss": 2.8608, + "step": 366 + }, + { + "epoch": 0.23692704970949, + "grad_norm": 11.030604640120872, + "learning_rate": 5.5049999999999996e-05, + "loss": 2.8284, + "step": 367 + }, + { + "epoch": 0.23757262750161395, + "grad_norm": 6.5931806891126605, + "learning_rate": 5.519999999999999e-05, + "loss": 2.6075, + "step": 368 + }, + { + "epoch": 0.23821820529373788, 
+ "grad_norm": 2.5009370032440232, + "learning_rate": 5.535e-05, + "loss": 2.7275, + "step": 369 + }, + { + "epoch": 0.23886378308586184, + "grad_norm": 2.8709494151903594, + "learning_rate": 5.5499999999999994e-05, + "loss": 2.4823, + "step": 370 + }, + { + "epoch": 0.2395093608779858, + "grad_norm": 26.492167441349846, + "learning_rate": 5.565e-05, + "loss": 2.9233, + "step": 371 + }, + { + "epoch": 0.24015493867010976, + "grad_norm": 2.835292957833368, + "learning_rate": 5.5799999999999994e-05, + "loss": 2.6281, + "step": 372 + }, + { + "epoch": 0.2408005164622337, + "grad_norm": 2.384660088671393, + "learning_rate": 5.595e-05, + "loss": 2.8845, + "step": 373 + }, + { + "epoch": 0.24144609425435765, + "grad_norm": 2.552751932562139, + "learning_rate": 5.6099999999999995e-05, + "loss": 2.9122, + "step": 374 + }, + { + "epoch": 0.2420916720464816, + "grad_norm": 2.313757335316188, + "learning_rate": 5.625e-05, + "loss": 2.6634, + "step": 375 + }, + { + "epoch": 0.24273724983860556, + "grad_norm": 2.3981902693172295, + "learning_rate": 5.6399999999999995e-05, + "loss": 2.7433, + "step": 376 + }, + { + "epoch": 0.2433828276307295, + "grad_norm": 2.9527840072360285, + "learning_rate": 5.654999999999999e-05, + "loss": 2.8935, + "step": 377 + }, + { + "epoch": 0.24402840542285345, + "grad_norm": 2.584175913645095, + "learning_rate": 5.6699999999999996e-05, + "loss": 2.8624, + "step": 378 + }, + { + "epoch": 0.2446739832149774, + "grad_norm": 2.827078297746934, + "learning_rate": 5.684999999999999e-05, + "loss": 3.1437, + "step": 379 + }, + { + "epoch": 0.24531956100710137, + "grad_norm": 2.990435019378541, + "learning_rate": 5.6999999999999996e-05, + "loss": 2.8932, + "step": 380 + }, + { + "epoch": 0.2459651387992253, + "grad_norm": 2.5486911723985397, + "learning_rate": 5.714999999999999e-05, + "loss": 2.8475, + "step": 381 + }, + { + "epoch": 0.24661071659134925, + "grad_norm": 2.815338069119461, + "learning_rate": 5.73e-05, + "loss": 2.9343, + "step": 382 + }, + { + "epoch": 0.2472562943834732, + "grad_norm": 2.637585484536922, + "learning_rate": 5.7449999999999994e-05, + "loss": 2.4072, + "step": 383 + }, + { + "epoch": 0.24790187217559717, + "grad_norm": 2.4907313571992677, + "learning_rate": 5.76e-05, + "loss": 2.7388, + "step": 384 + }, + { + "epoch": 0.2485474499677211, + "grad_norm": 2.737970996256245, + "learning_rate": 5.7749999999999994e-05, + "loss": 3.1248, + "step": 385 + }, + { + "epoch": 0.24919302775984506, + "grad_norm": 2.5937188280803354, + "learning_rate": 5.79e-05, + "loss": 2.9509, + "step": 386 + }, + { + "epoch": 0.24983860555196902, + "grad_norm": 2.3240096739079585, + "learning_rate": 5.8049999999999995e-05, + "loss": 2.9295, + "step": 387 + }, + { + "epoch": 0.25048418334409295, + "grad_norm": 2.5178078757825366, + "learning_rate": 5.82e-05, + "loss": 2.7358, + "step": 388 + }, + { + "epoch": 0.25112976113621693, + "grad_norm": 2.290638308327249, + "learning_rate": 5.8349999999999995e-05, + "loss": 2.883, + "step": 389 + }, + { + "epoch": 0.25177533892834086, + "grad_norm": 2.450025332848155, + "learning_rate": 5.85e-05, + "loss": 2.8622, + "step": 390 + }, + { + "epoch": 0.2524209167204648, + "grad_norm": 2.350156433959302, + "learning_rate": 5.8649999999999996e-05, + "loss": 2.925, + "step": 391 + }, + { + "epoch": 0.2530664945125888, + "grad_norm": 2.465892350638215, + "learning_rate": 5.88e-05, + "loss": 2.8835, + "step": 392 + }, + { + "epoch": 0.2537120723047127, + "grad_norm": 2.3869909721832934, + "learning_rate": 5.8949999999999996e-05, + "loss": 3.0981, 
+ "step": 393 + }, + { + "epoch": 0.2543576500968367, + "grad_norm": 2.409904168021534, + "learning_rate": 5.91e-05, + "loss": 2.8914, + "step": 394 + }, + { + "epoch": 0.2550032278889606, + "grad_norm": 2.3850179528432447, + "learning_rate": 5.925e-05, + "loss": 2.7395, + "step": 395 + }, + { + "epoch": 0.25564880568108456, + "grad_norm": 2.493198530695449, + "learning_rate": 5.94e-05, + "loss": 2.9293, + "step": 396 + }, + { + "epoch": 0.25629438347320854, + "grad_norm": 2.257723234482677, + "learning_rate": 5.955e-05, + "loss": 2.3802, + "step": 397 + }, + { + "epoch": 0.2569399612653325, + "grad_norm": 2.211308849916495, + "learning_rate": 5.97e-05, + "loss": 2.3597, + "step": 398 + }, + { + "epoch": 0.2575855390574564, + "grad_norm": 2.709201521146019, + "learning_rate": 5.985e-05, + "loss": 2.8826, + "step": 399 + }, + { + "epoch": 0.2582311168495804, + "grad_norm": 2.433363833889051, + "learning_rate": 5.9999999999999995e-05, + "loss": 2.7682, + "step": 400 + }, + { + "epoch": 0.2588766946417043, + "grad_norm": 2.6886800880062895, + "learning_rate": 6.015e-05, + "loss": 2.9951, + "step": 401 + }, + { + "epoch": 0.25952227243382825, + "grad_norm": 2.404963652984053, + "learning_rate": 6.0299999999999995e-05, + "loss": 2.4013, + "step": 402 + }, + { + "epoch": 0.26016785022595224, + "grad_norm": 2.167028162728205, + "learning_rate": 6.045e-05, + "loss": 2.6626, + "step": 403 + }, + { + "epoch": 0.26081342801807617, + "grad_norm": 2.43607266873502, + "learning_rate": 6.0599999999999996e-05, + "loss": 2.8955, + "step": 404 + }, + { + "epoch": 0.26145900581020015, + "grad_norm": 2.231252663767769, + "learning_rate": 6.075e-05, + "loss": 2.6902, + "step": 405 + }, + { + "epoch": 0.2621045836023241, + "grad_norm": 2.31479761303201, + "learning_rate": 6.0899999999999996e-05, + "loss": 2.7503, + "step": 406 + }, + { + "epoch": 0.262750161394448, + "grad_norm": 2.360093513859793, + "learning_rate": 6.104999999999999e-05, + "loss": 2.674, + "step": 407 + }, + { + "epoch": 0.263395739186572, + "grad_norm": 2.7204922068475312, + "learning_rate": 6.12e-05, + "loss": 3.0055, + "step": 408 + }, + { + "epoch": 0.26404131697869593, + "grad_norm": 2.3262114811082237, + "learning_rate": 6.134999999999999e-05, + "loss": 2.8631, + "step": 409 + }, + { + "epoch": 0.26468689477081986, + "grad_norm": 2.210046279830668, + "learning_rate": 6.149999999999999e-05, + "loss": 2.7398, + "step": 410 + }, + { + "epoch": 0.26533247256294384, + "grad_norm": 2.2168186049279135, + "learning_rate": 6.165e-05, + "loss": 2.7912, + "step": 411 + }, + { + "epoch": 0.2659780503550678, + "grad_norm": 2.1982506707497387, + "learning_rate": 6.18e-05, + "loss": 2.9049, + "step": 412 + }, + { + "epoch": 0.26662362814719176, + "grad_norm": 2.2760695890830918, + "learning_rate": 6.194999999999999e-05, + "loss": 2.8445, + "step": 413 + }, + { + "epoch": 0.2672692059393157, + "grad_norm": 2.19610338708658, + "learning_rate": 6.209999999999999e-05, + "loss": 2.4046, + "step": 414 + }, + { + "epoch": 0.2679147837314396, + "grad_norm": 2.5305807673515646, + "learning_rate": 6.225e-05, + "loss": 3.0705, + "step": 415 + }, + { + "epoch": 0.2685603615235636, + "grad_norm": 2.4788330735442585, + "learning_rate": 6.239999999999999e-05, + "loss": 2.5277, + "step": 416 + }, + { + "epoch": 0.26920593931568754, + "grad_norm": 2.4828928561263925, + "learning_rate": 6.254999999999999e-05, + "loss": 2.8988, + "step": 417 + }, + { + "epoch": 0.26985151710781147, + "grad_norm": 2.2017554502253374, + "learning_rate": 6.269999999999999e-05, + "loss": 
2.5966, + "step": 418 + }, + { + "epoch": 0.27049709489993545, + "grad_norm": 2.255040694694599, + "learning_rate": 6.285e-05, + "loss": 2.8446, + "step": 419 + }, + { + "epoch": 0.2711426726920594, + "grad_norm": 2.1531500785773163, + "learning_rate": 6.299999999999999e-05, + "loss": 2.8862, + "step": 420 + }, + { + "epoch": 0.27178825048418337, + "grad_norm": 2.1143031204513307, + "learning_rate": 6.314999999999999e-05, + "loss": 2.7232, + "step": 421 + }, + { + "epoch": 0.2724338282763073, + "grad_norm": 2.390471741738963, + "learning_rate": 6.33e-05, + "loss": 2.5202, + "step": 422 + }, + { + "epoch": 0.27307940606843123, + "grad_norm": 2.2624413865753237, + "learning_rate": 6.345e-05, + "loss": 2.4533, + "step": 423 + }, + { + "epoch": 0.2737249838605552, + "grad_norm": 2.587608685821866, + "learning_rate": 6.359999999999999e-05, + "loss": 2.8324, + "step": 424 + }, + { + "epoch": 0.27437056165267915, + "grad_norm": 2.754621685743186, + "learning_rate": 6.374999999999999e-05, + "loss": 2.8374, + "step": 425 + }, + { + "epoch": 0.2750161394448031, + "grad_norm": 2.157947487325006, + "learning_rate": 6.39e-05, + "loss": 2.6392, + "step": 426 + }, + { + "epoch": 0.27566171723692706, + "grad_norm": 2.366195465665874, + "learning_rate": 6.405e-05, + "loss": 2.8379, + "step": 427 + }, + { + "epoch": 0.276307295029051, + "grad_norm": 2.3294586552501673, + "learning_rate": 6.419999999999999e-05, + "loss": 2.8881, + "step": 428 + }, + { + "epoch": 0.276952872821175, + "grad_norm": 2.437114231847414, + "learning_rate": 6.434999999999999e-05, + "loss": 2.4152, + "step": 429 + }, + { + "epoch": 0.2775984506132989, + "grad_norm": 2.0284572699407386, + "learning_rate": 6.45e-05, + "loss": 2.6584, + "step": 430 + }, + { + "epoch": 0.27824402840542284, + "grad_norm": 2.2538070931960235, + "learning_rate": 6.465e-05, + "loss": 2.7311, + "step": 431 + }, + { + "epoch": 0.2788896061975468, + "grad_norm": 2.4983841213935643, + "learning_rate": 6.479999999999999e-05, + "loss": 2.9752, + "step": 432 + }, + { + "epoch": 0.27953518398967075, + "grad_norm": 2.560256954519061, + "learning_rate": 6.494999999999999e-05, + "loss": 2.6734, + "step": 433 + }, + { + "epoch": 0.2801807617817947, + "grad_norm": 2.252995963610315, + "learning_rate": 6.51e-05, + "loss": 2.3383, + "step": 434 + }, + { + "epoch": 0.28082633957391867, + "grad_norm": 2.4996847032998897, + "learning_rate": 6.525e-05, + "loss": 2.8763, + "step": 435 + }, + { + "epoch": 0.2814719173660426, + "grad_norm": 2.7177666246317456, + "learning_rate": 6.539999999999999e-05, + "loss": 2.684, + "step": 436 + }, + { + "epoch": 0.28211749515816653, + "grad_norm": 2.3209036343360947, + "learning_rate": 6.555e-05, + "loss": 2.4492, + "step": 437 + }, + { + "epoch": 0.2827630729502905, + "grad_norm": 2.150105502670163, + "learning_rate": 6.57e-05, + "loss": 2.6927, + "step": 438 + }, + { + "epoch": 0.28340865074241445, + "grad_norm": 2.035427100582523, + "learning_rate": 6.584999999999999e-05, + "loss": 2.8726, + "step": 439 + }, + { + "epoch": 0.28405422853453843, + "grad_norm": 2.189584339115553, + "learning_rate": 6.599999999999999e-05, + "loss": 2.8562, + "step": 440 + }, + { + "epoch": 0.28469980632666236, + "grad_norm": 2.188304799035174, + "learning_rate": 6.615e-05, + "loss": 2.8826, + "step": 441 + }, + { + "epoch": 0.2853453841187863, + "grad_norm": 2.156327684165163, + "learning_rate": 6.63e-05, + "loss": 2.7462, + "step": 442 + }, + { + "epoch": 0.2859909619109103, + "grad_norm": 2.3615901758870446, + "learning_rate": 6.644999999999999e-05, + 
"loss": 2.8639, + "step": 443 + }, + { + "epoch": 0.2866365397030342, + "grad_norm": 2.2686730217734032, + "learning_rate": 6.659999999999999e-05, + "loss": 2.8248, + "step": 444 + }, + { + "epoch": 0.28728211749515814, + "grad_norm": 2.3917547776098482, + "learning_rate": 6.675e-05, + "loss": 2.8477, + "step": 445 + }, + { + "epoch": 0.2879276952872821, + "grad_norm": 2.3137602997736866, + "learning_rate": 6.69e-05, + "loss": 2.7799, + "step": 446 + }, + { + "epoch": 0.28857327307940606, + "grad_norm": 2.3290432244026866, + "learning_rate": 6.704999999999999e-05, + "loss": 2.8051, + "step": 447 + }, + { + "epoch": 0.28921885087153004, + "grad_norm": 2.1598235083589836, + "learning_rate": 6.72e-05, + "loss": 2.8225, + "step": 448 + }, + { + "epoch": 0.289864428663654, + "grad_norm": 2.0718012522124805, + "learning_rate": 6.735e-05, + "loss": 2.6137, + "step": 449 + }, + { + "epoch": 0.2905100064557779, + "grad_norm": 2.2542347040871764, + "learning_rate": 6.75e-05, + "loss": 2.6469, + "step": 450 + }, + { + "epoch": 0.2911555842479019, + "grad_norm": 2.1030826687316857, + "learning_rate": 6.764999999999999e-05, + "loss": 2.7853, + "step": 451 + }, + { + "epoch": 0.2918011620400258, + "grad_norm": 2.3541028777538906, + "learning_rate": 6.78e-05, + "loss": 2.3452, + "step": 452 + }, + { + "epoch": 0.29244673983214975, + "grad_norm": 2.252898948567988, + "learning_rate": 6.795e-05, + "loss": 2.7585, + "step": 453 + }, + { + "epoch": 0.29309231762427373, + "grad_norm": 2.2730779873912352, + "learning_rate": 6.81e-05, + "loss": 2.9443, + "step": 454 + }, + { + "epoch": 0.29373789541639767, + "grad_norm": 2.3207401973151556, + "learning_rate": 6.824999999999999e-05, + "loss": 2.958, + "step": 455 + }, + { + "epoch": 0.29438347320852165, + "grad_norm": 2.203034791432245, + "learning_rate": 6.84e-05, + "loss": 2.769, + "step": 456 + }, + { + "epoch": 0.2950290510006456, + "grad_norm": 2.2557208205146413, + "learning_rate": 6.855e-05, + "loss": 2.3253, + "step": 457 + }, + { + "epoch": 0.2956746287927695, + "grad_norm": 2.190473233405859, + "learning_rate": 6.87e-05, + "loss": 2.8469, + "step": 458 + }, + { + "epoch": 0.2963202065848935, + "grad_norm": 2.2090564885396033, + "learning_rate": 6.884999999999999e-05, + "loss": 2.8548, + "step": 459 + }, + { + "epoch": 0.2969657843770174, + "grad_norm": 2.3469429242372284, + "learning_rate": 6.9e-05, + "loss": 2.7192, + "step": 460 + }, + { + "epoch": 0.29761136216914136, + "grad_norm": 2.2242641290458876, + "learning_rate": 6.915e-05, + "loss": 2.8379, + "step": 461 + }, + { + "epoch": 0.29825693996126534, + "grad_norm": 2.090245089971745, + "learning_rate": 6.93e-05, + "loss": 2.8548, + "step": 462 + }, + { + "epoch": 0.2989025177533893, + "grad_norm": 2.043886945032039, + "learning_rate": 6.945e-05, + "loss": 2.7265, + "step": 463 + }, + { + "epoch": 0.29954809554551326, + "grad_norm": 2.1055619774246015, + "learning_rate": 6.96e-05, + "loss": 2.6744, + "step": 464 + }, + { + "epoch": 0.3001936733376372, + "grad_norm": 2.0505674001785663, + "learning_rate": 6.975e-05, + "loss": 2.819, + "step": 465 + }, + { + "epoch": 0.3008392511297611, + "grad_norm": 2.060011479148182, + "learning_rate": 6.989999999999999e-05, + "loss": 2.8889, + "step": 466 + }, + { + "epoch": 0.3014848289218851, + "grad_norm": 2.2146381401577258, + "learning_rate": 7.005e-05, + "loss": 2.6328, + "step": 467 + }, + { + "epoch": 0.30213040671400904, + "grad_norm": 2.224042010010175, + "learning_rate": 7.02e-05, + "loss": 2.7907, + "step": 468 + }, + { + "epoch": 
0.30277598450613297, + "grad_norm": 2.1858938664634553, + "learning_rate": 7.034999999999999e-05, + "loss": 2.8894, + "step": 469 + }, + { + "epoch": 0.30342156229825695, + "grad_norm": 2.021975169497758, + "learning_rate": 7.049999999999999e-05, + "loss": 2.445, + "step": 470 + }, + { + "epoch": 0.3040671400903809, + "grad_norm": 2.3673568969981638, + "learning_rate": 7.065e-05, + "loss": 2.932, + "step": 471 + }, + { + "epoch": 0.30471271788250487, + "grad_norm": 2.5878821593097383, + "learning_rate": 7.079999999999999e-05, + "loss": 2.8635, + "step": 472 + }, + { + "epoch": 0.3053582956746288, + "grad_norm": 2.130188511153066, + "learning_rate": 7.094999999999999e-05, + "loss": 2.9932, + "step": 473 + }, + { + "epoch": 0.30600387346675273, + "grad_norm": 1.929777084635461, + "learning_rate": 7.11e-05, + "loss": 2.818, + "step": 474 + }, + { + "epoch": 0.3066494512588767, + "grad_norm": 2.137632476593421, + "learning_rate": 7.125e-05, + "loss": 2.8527, + "step": 475 + }, + { + "epoch": 0.30729502905100065, + "grad_norm": 2.2007259702017703, + "learning_rate": 7.139999999999999e-05, + "loss": 3.1356, + "step": 476 + }, + { + "epoch": 0.3079406068431246, + "grad_norm": 2.3960483337563203, + "learning_rate": 7.154999999999999e-05, + "loss": 2.9433, + "step": 477 + }, + { + "epoch": 0.30858618463524856, + "grad_norm": 2.3172579952356926, + "learning_rate": 7.17e-05, + "loss": 2.6687, + "step": 478 + }, + { + "epoch": 0.3092317624273725, + "grad_norm": 2.614745757916953, + "learning_rate": 7.184999999999998e-05, + "loss": 2.9881, + "step": 479 + }, + { + "epoch": 0.3098773402194964, + "grad_norm": 2.107128772995155, + "learning_rate": 7.199999999999999e-05, + "loss": 2.6783, + "step": 480 + }, + { + "epoch": 0.3105229180116204, + "grad_norm": 2.234208286311728, + "learning_rate": 7.214999999999999e-05, + "loss": 2.8182, + "step": 481 + }, + { + "epoch": 0.31116849580374434, + "grad_norm": 2.0250781901500776, + "learning_rate": 7.23e-05, + "loss": 2.9035, + "step": 482 + }, + { + "epoch": 0.3118140735958683, + "grad_norm": 2.4396515962320944, + "learning_rate": 7.244999999999999e-05, + "loss": 2.9379, + "step": 483 + }, + { + "epoch": 0.31245965138799225, + "grad_norm": 2.7857246909755475, + "learning_rate": 7.259999999999999e-05, + "loss": 2.9766, + "step": 484 + }, + { + "epoch": 0.3131052291801162, + "grad_norm": 2.301791232251116, + "learning_rate": 7.274999999999999e-05, + "loss": 2.7281, + "step": 485 + }, + { + "epoch": 0.31375080697224017, + "grad_norm": 2.2699037005707834, + "learning_rate": 7.29e-05, + "loss": 3.0159, + "step": 486 + }, + { + "epoch": 0.3143963847643641, + "grad_norm": 3.437292682785185, + "learning_rate": 7.304999999999999e-05, + "loss": 2.4555, + "step": 487 + }, + { + "epoch": 0.31504196255648803, + "grad_norm": 2.176927337627641, + "learning_rate": 7.319999999999999e-05, + "loss": 2.6848, + "step": 488 + }, + { + "epoch": 0.315687540348612, + "grad_norm": 2.2757911209841946, + "learning_rate": 7.335e-05, + "loss": 2.9729, + "step": 489 + }, + { + "epoch": 0.31633311814073595, + "grad_norm": 2.1637022016232645, + "learning_rate": 7.35e-05, + "loss": 2.7292, + "step": 490 + }, + { + "epoch": 0.31697869593285993, + "grad_norm": 2.2875410426820224, + "learning_rate": 7.364999999999999e-05, + "loss": 2.9322, + "step": 491 + }, + { + "epoch": 0.31762427372498386, + "grad_norm": 2.199013134774917, + "learning_rate": 7.379999999999999e-05, + "loss": 2.9291, + "step": 492 + }, + { + "epoch": 0.3182698515171078, + "grad_norm": 2.5300280890964357, + "learning_rate": 
7.395e-05, + "loss": 2.6937, + "step": 493 + }, + { + "epoch": 0.3189154293092318, + "grad_norm": 2.7785719466957657, + "learning_rate": 7.41e-05, + "loss": 2.7382, + "step": 494 + }, + { + "epoch": 0.3195610071013557, + "grad_norm": 2.090643760345164, + "learning_rate": 7.424999999999999e-05, + "loss": 2.7278, + "step": 495 + }, + { + "epoch": 0.32020658489347964, + "grad_norm": 2.0005363498584803, + "learning_rate": 7.439999999999999e-05, + "loss": 2.7465, + "step": 496 + }, + { + "epoch": 0.3208521626856036, + "grad_norm": 2.08885283487973, + "learning_rate": 7.455e-05, + "loss": 2.9346, + "step": 497 + }, + { + "epoch": 0.32149774047772756, + "grad_norm": 2.0127462857298073, + "learning_rate": 7.47e-05, + "loss": 2.7898, + "step": 498 + }, + { + "epoch": 0.32214331826985154, + "grad_norm": 2.834099040410658, + "learning_rate": 7.484999999999999e-05, + "loss": 2.3041, + "step": 499 + }, + { + "epoch": 0.32278889606197547, + "grad_norm": 1.9410899244091093, + "learning_rate": 7.5e-05, + "loss": 2.7032, + "step": 500 + }, + { + "epoch": 0.3234344738540994, + "grad_norm": 1.908872002363754, + "learning_rate": 7.515e-05, + "loss": 2.2422, + "step": 501 + }, + { + "epoch": 0.3240800516462234, + "grad_norm": 2.541209905455719, + "learning_rate": 7.529999999999999e-05, + "loss": 2.8394, + "step": 502 + }, + { + "epoch": 0.3247256294383473, + "grad_norm": 2.336479457688399, + "learning_rate": 7.544999999999999e-05, + "loss": 2.6607, + "step": 503 + }, + { + "epoch": 0.32537120723047125, + "grad_norm": 2.182228446372999, + "learning_rate": 7.56e-05, + "loss": 2.9049, + "step": 504 + }, + { + "epoch": 0.32601678502259523, + "grad_norm": 2.080810235421079, + "learning_rate": 7.575e-05, + "loss": 2.8838, + "step": 505 + }, + { + "epoch": 0.32666236281471916, + "grad_norm": 2.045249726156895, + "learning_rate": 7.589999999999999e-05, + "loss": 2.5634, + "step": 506 + }, + { + "epoch": 0.32730794060684315, + "grad_norm": 2.134968037766538, + "learning_rate": 7.604999999999999e-05, + "loss": 2.697, + "step": 507 + }, + { + "epoch": 0.3279535183989671, + "grad_norm": 2.223749687882318, + "learning_rate": 7.62e-05, + "loss": 2.857, + "step": 508 + }, + { + "epoch": 0.328599096191091, + "grad_norm": 1.9208945124340413, + "learning_rate": 7.635e-05, + "loss": 2.8188, + "step": 509 + }, + { + "epoch": 0.329244673983215, + "grad_norm": 1.996625204181139, + "learning_rate": 7.649999999999999e-05, + "loss": 2.7597, + "step": 510 + }, + { + "epoch": 0.3298902517753389, + "grad_norm": 2.5146285801644868, + "learning_rate": 7.664999999999999e-05, + "loss": 2.9076, + "step": 511 + }, + { + "epoch": 0.33053582956746286, + "grad_norm": 1.997032140459991, + "learning_rate": 7.68e-05, + "loss": 2.415, + "step": 512 + }, + { + "epoch": 0.33118140735958684, + "grad_norm": 2.181537475437808, + "learning_rate": 7.695e-05, + "loss": 2.6273, + "step": 513 + }, + { + "epoch": 0.3318269851517108, + "grad_norm": 2.194993030287855, + "learning_rate": 7.709999999999999e-05, + "loss": 2.8523, + "step": 514 + }, + { + "epoch": 0.33247256294383476, + "grad_norm": 2.3962251561608268, + "learning_rate": 7.725e-05, + "loss": 2.6014, + "step": 515 + }, + { + "epoch": 0.3331181407359587, + "grad_norm": 2.1783641221079626, + "learning_rate": 7.74e-05, + "loss": 2.7145, + "step": 516 + }, + { + "epoch": 0.3337637185280826, + "grad_norm": 2.098188811117611, + "learning_rate": 7.755e-05, + "loss": 2.7684, + "step": 517 + }, + { + "epoch": 0.3344092963202066, + "grad_norm": 2.198883552815432, + "learning_rate": 7.769999999999999e-05, + 
"loss": 2.6813, + "step": 518 + }, + { + "epoch": 0.33505487411233054, + "grad_norm": 3.022456913638073, + "learning_rate": 7.785e-05, + "loss": 2.4373, + "step": 519 + }, + { + "epoch": 0.33570045190445447, + "grad_norm": 1.9920083683575005, + "learning_rate": 7.8e-05, + "loss": 2.4024, + "step": 520 + }, + { + "epoch": 0.33634602969657845, + "grad_norm": 1.9662073670627678, + "learning_rate": 7.815e-05, + "loss": 2.7558, + "step": 521 + }, + { + "epoch": 0.3369916074887024, + "grad_norm": 1.9519800452526734, + "learning_rate": 7.829999999999999e-05, + "loss": 2.4289, + "step": 522 + }, + { + "epoch": 0.3376371852808263, + "grad_norm": 2.276713287173242, + "learning_rate": 7.845e-05, + "loss": 2.7689, + "step": 523 + }, + { + "epoch": 0.3382827630729503, + "grad_norm": 2.3575331455658497, + "learning_rate": 7.86e-05, + "loss": 2.6331, + "step": 524 + }, + { + "epoch": 0.33892834086507423, + "grad_norm": 2.2320070399315988, + "learning_rate": 7.874999999999999e-05, + "loss": 2.7475, + "step": 525 + }, + { + "epoch": 0.3395739186571982, + "grad_norm": 2.876610892039321, + "learning_rate": 7.89e-05, + "loss": 2.6184, + "step": 526 + }, + { + "epoch": 0.34021949644932215, + "grad_norm": 2.494916058193855, + "learning_rate": 7.905e-05, + "loss": 2.9008, + "step": 527 + }, + { + "epoch": 0.3408650742414461, + "grad_norm": 1.9551429144546668, + "learning_rate": 7.92e-05, + "loss": 2.5141, + "step": 528 + }, + { + "epoch": 0.34151065203357006, + "grad_norm": 2.0291711656428086, + "learning_rate": 7.934999999999999e-05, + "loss": 2.3618, + "step": 529 + }, + { + "epoch": 0.342156229825694, + "grad_norm": 2.0386354572339, + "learning_rate": 7.95e-05, + "loss": 2.9891, + "step": 530 + }, + { + "epoch": 0.3428018076178179, + "grad_norm": 2.0915049944141195, + "learning_rate": 7.965e-05, + "loss": 2.6765, + "step": 531 + }, + { + "epoch": 0.3434473854099419, + "grad_norm": 2.451513794772121, + "learning_rate": 7.98e-05, + "loss": 2.8532, + "step": 532 + }, + { + "epoch": 0.34409296320206584, + "grad_norm": 2.21609230687707, + "learning_rate": 7.994999999999999e-05, + "loss": 2.7466, + "step": 533 + }, + { + "epoch": 0.3447385409941898, + "grad_norm": 1.876116044220905, + "learning_rate": 8.01e-05, + "loss": 2.6595, + "step": 534 + }, + { + "epoch": 0.34538411878631375, + "grad_norm": 2.0313305046053927, + "learning_rate": 8.025e-05, + "loss": 2.757, + "step": 535 + }, + { + "epoch": 0.3460296965784377, + "grad_norm": 1.962726251712643, + "learning_rate": 8.04e-05, + "loss": 2.7075, + "step": 536 + }, + { + "epoch": 0.34667527437056167, + "grad_norm": 2.087356710299744, + "learning_rate": 8.054999999999999e-05, + "loss": 2.3704, + "step": 537 + }, + { + "epoch": 0.3473208521626856, + "grad_norm": 1.8912513947467464, + "learning_rate": 8.07e-05, + "loss": 2.7237, + "step": 538 + }, + { + "epoch": 0.34796642995480953, + "grad_norm": 2.5219112775208328, + "learning_rate": 8.085e-05, + "loss": 2.8534, + "step": 539 + }, + { + "epoch": 0.3486120077469335, + "grad_norm": 2.035439146419951, + "learning_rate": 8.1e-05, + "loss": 2.9198, + "step": 540 + }, + { + "epoch": 0.34925758553905745, + "grad_norm": 2.0773382186758895, + "learning_rate": 8.115e-05, + "loss": 2.8958, + "step": 541 + }, + { + "epoch": 0.34990316333118143, + "grad_norm": 1.9414827794289489, + "learning_rate": 8.13e-05, + "loss": 2.7733, + "step": 542 + }, + { + "epoch": 0.35054874112330536, + "grad_norm": 1.9689877015861885, + "learning_rate": 8.145e-05, + "loss": 2.8515, + "step": 543 + }, + { + "epoch": 0.3511943189154293, + "grad_norm": 
1.9267766034181952, + "learning_rate": 8.16e-05, + "loss": 2.727, + "step": 544 + }, + { + "epoch": 0.3518398967075533, + "grad_norm": 2.0736636719354546, + "learning_rate": 8.175e-05, + "loss": 2.6266, + "step": 545 + }, + { + "epoch": 0.3524854744996772, + "grad_norm": 2.170014169941677, + "learning_rate": 8.19e-05, + "loss": 2.929, + "step": 546 + }, + { + "epoch": 0.35313105229180114, + "grad_norm": 1.8453127572686103, + "learning_rate": 8.205e-05, + "loss": 2.876, + "step": 547 + }, + { + "epoch": 0.3537766300839251, + "grad_norm": 2.1554310547776483, + "learning_rate": 8.22e-05, + "loss": 2.7604, + "step": 548 + }, + { + "epoch": 0.35442220787604906, + "grad_norm": 1.8450555388784111, + "learning_rate": 8.235e-05, + "loss": 2.7818, + "step": 549 + }, + { + "epoch": 0.35506778566817304, + "grad_norm": 1.841139173005452, + "learning_rate": 8.25e-05, + "loss": 2.759, + "step": 550 + }, + { + "epoch": 0.35571336346029697, + "grad_norm": 1.9272942833618159, + "learning_rate": 8.265e-05, + "loss": 2.8715, + "step": 551 + }, + { + "epoch": 0.3563589412524209, + "grad_norm": 2.0263642077394937, + "learning_rate": 8.28e-05, + "loss": 2.9521, + "step": 552 + }, + { + "epoch": 0.3570045190445449, + "grad_norm": 1.943884823382823, + "learning_rate": 8.295e-05, + "loss": 2.8165, + "step": 553 + }, + { + "epoch": 0.3576500968366688, + "grad_norm": 1.720429568090338, + "learning_rate": 8.31e-05, + "loss": 2.6862, + "step": 554 + }, + { + "epoch": 0.35829567462879275, + "grad_norm": 1.9439493984293035, + "learning_rate": 8.325e-05, + "loss": 2.6929, + "step": 555 + }, + { + "epoch": 0.35894125242091673, + "grad_norm": 2.8437723548215743, + "learning_rate": 8.34e-05, + "loss": 2.801, + "step": 556 + }, + { + "epoch": 0.35958683021304066, + "grad_norm": 2.180156859285289, + "learning_rate": 8.355e-05, + "loss": 2.2705, + "step": 557 + }, + { + "epoch": 0.3602324080051646, + "grad_norm": 2.002307505395898, + "learning_rate": 8.37e-05, + "loss": 2.7175, + "step": 558 + }, + { + "epoch": 0.3608779857972886, + "grad_norm": 2.0793232507685495, + "learning_rate": 8.385e-05, + "loss": 2.2671, + "step": 559 + }, + { + "epoch": 0.3615235635894125, + "grad_norm": 2.514564966923557, + "learning_rate": 8.4e-05, + "loss": 2.6001, + "step": 560 + }, + { + "epoch": 0.3621691413815365, + "grad_norm": 2.023031453831732, + "learning_rate": 8.415e-05, + "loss": 2.6034, + "step": 561 + }, + { + "epoch": 0.3628147191736604, + "grad_norm": 1.9774782148933796, + "learning_rate": 8.43e-05, + "loss": 2.7896, + "step": 562 + }, + { + "epoch": 0.36346029696578436, + "grad_norm": 1.846824919069077, + "learning_rate": 8.444999999999998e-05, + "loss": 2.8024, + "step": 563 + }, + { + "epoch": 0.36410587475790834, + "grad_norm": 2.2657816787424614, + "learning_rate": 8.459999999999998e-05, + "loss": 2.618, + "step": 564 + }, + { + "epoch": 0.3647514525500323, + "grad_norm": 2.111117532605048, + "learning_rate": 8.474999999999999e-05, + "loss": 2.6991, + "step": 565 + }, + { + "epoch": 0.3653970303421562, + "grad_norm": 2.293356722604469, + "learning_rate": 8.489999999999999e-05, + "loss": 2.9866, + "step": 566 + }, + { + "epoch": 0.3660426081342802, + "grad_norm": 2.0477148929027065, + "learning_rate": 8.504999999999998e-05, + "loss": 2.8028, + "step": 567 + }, + { + "epoch": 0.3666881859264041, + "grad_norm": 1.8579251061324027, + "learning_rate": 8.519999999999998e-05, + "loss": 2.8536, + "step": 568 + }, + { + "epoch": 0.3673337637185281, + "grad_norm": 2.176276657605816, + "learning_rate": 8.534999999999999e-05, + "loss": 
2.6342, + "step": 569 + }, + { + "epoch": 0.36797934151065204, + "grad_norm": 1.8732346711732664, + "learning_rate": 8.549999999999999e-05, + "loss": 2.7584, + "step": 570 + }, + { + "epoch": 0.36862491930277597, + "grad_norm": 2.029553486268857, + "learning_rate": 8.564999999999998e-05, + "loss": 2.819, + "step": 571 + }, + { + "epoch": 0.36927049709489995, + "grad_norm": 1.781646538308032, + "learning_rate": 8.579999999999998e-05, + "loss": 2.6398, + "step": 572 + }, + { + "epoch": 0.3699160748870239, + "grad_norm": 1.7442919434398025, + "learning_rate": 8.594999999999999e-05, + "loss": 2.6224, + "step": 573 + }, + { + "epoch": 0.3705616526791478, + "grad_norm": 1.9483452976468694, + "learning_rate": 8.609999999999999e-05, + "loss": 2.5443, + "step": 574 + }, + { + "epoch": 0.3712072304712718, + "grad_norm": 1.9445075355169308, + "learning_rate": 8.624999999999998e-05, + "loss": 2.4834, + "step": 575 + }, + { + "epoch": 0.37185280826339573, + "grad_norm": 1.8442893306786663, + "learning_rate": 8.639999999999999e-05, + "loss": 2.5387, + "step": 576 + }, + { + "epoch": 0.3724983860555197, + "grad_norm": 1.7366940341239077, + "learning_rate": 8.654999999999999e-05, + "loss": 2.1651, + "step": 577 + }, + { + "epoch": 0.37314396384764364, + "grad_norm": 2.0186752603624996, + "learning_rate": 8.669999999999998e-05, + "loss": 2.561, + "step": 578 + }, + { + "epoch": 0.3737895416397676, + "grad_norm": 1.7370171841903217, + "learning_rate": 8.684999999999998e-05, + "loss": 2.0519, + "step": 579 + }, + { + "epoch": 0.37443511943189156, + "grad_norm": 2.2025637856463174, + "learning_rate": 8.699999999999999e-05, + "loss": 2.8712, + "step": 580 + }, + { + "epoch": 0.3750806972240155, + "grad_norm": 1.9847895905537982, + "learning_rate": 8.714999999999999e-05, + "loss": 2.5493, + "step": 581 + }, + { + "epoch": 0.3757262750161394, + "grad_norm": 2.012012667943063, + "learning_rate": 8.729999999999998e-05, + "loss": 2.5487, + "step": 582 + }, + { + "epoch": 0.3763718528082634, + "grad_norm": 2.4023069337853955, + "learning_rate": 8.744999999999998e-05, + "loss": 2.692, + "step": 583 + }, + { + "epoch": 0.37701743060038734, + "grad_norm": 1.7539136464865255, + "learning_rate": 8.759999999999999e-05, + "loss": 2.4952, + "step": 584 + }, + { + "epoch": 0.3776630083925113, + "grad_norm": 2.1340233789375453, + "learning_rate": 8.774999999999999e-05, + "loss": 2.9544, + "step": 585 + }, + { + "epoch": 0.37830858618463525, + "grad_norm": 1.9583770129160998, + "learning_rate": 8.789999999999998e-05, + "loss": 2.7273, + "step": 586 + }, + { + "epoch": 0.3789541639767592, + "grad_norm": 2.221472885219129, + "learning_rate": 8.804999999999999e-05, + "loss": 2.6618, + "step": 587 + }, + { + "epoch": 0.37959974176888317, + "grad_norm": 1.970963044705392, + "learning_rate": 8.819999999999999e-05, + "loss": 2.6962, + "step": 588 + }, + { + "epoch": 0.3802453195610071, + "grad_norm": 2.0192443447183996, + "learning_rate": 8.834999999999999e-05, + "loss": 2.831, + "step": 589 + }, + { + "epoch": 0.38089089735313103, + "grad_norm": 2.134796248937337, + "learning_rate": 8.849999999999998e-05, + "loss": 2.9506, + "step": 590 + }, + { + "epoch": 0.381536475145255, + "grad_norm": 2.2433361069254025, + "learning_rate": 8.864999999999999e-05, + "loss": 2.5345, + "step": 591 + }, + { + "epoch": 0.38218205293737895, + "grad_norm": 1.9610153537483652, + "learning_rate": 8.879999999999999e-05, + "loss": 2.661, + "step": 592 + }, + { + "epoch": 0.38282763072950293, + "grad_norm": 2.2034639165930408, + "learning_rate": 8.895e-05, 
+ "loss": 2.8058, + "step": 593 + }, + { + "epoch": 0.38347320852162686, + "grad_norm": 1.946509554766863, + "learning_rate": 8.909999999999998e-05, + "loss": 2.7998, + "step": 594 + }, + { + "epoch": 0.3841187863137508, + "grad_norm": 1.743763739352499, + "learning_rate": 8.924999999999999e-05, + "loss": 2.8017, + "step": 595 + }, + { + "epoch": 0.3847643641058748, + "grad_norm": 1.919458797670059, + "learning_rate": 8.939999999999999e-05, + "loss": 2.7613, + "step": 596 + }, + { + "epoch": 0.3854099418979987, + "grad_norm": 1.7182814354064677, + "learning_rate": 8.955e-05, + "loss": 2.7881, + "step": 597 + }, + { + "epoch": 0.38605551969012264, + "grad_norm": 1.890309132469626, + "learning_rate": 8.969999999999998e-05, + "loss": 2.5191, + "step": 598 + }, + { + "epoch": 0.3867010974822466, + "grad_norm": 2.004677268931328, + "learning_rate": 8.984999999999999e-05, + "loss": 2.693, + "step": 599 + }, + { + "epoch": 0.38734667527437056, + "grad_norm": 1.8560934980399666, + "learning_rate": 8.999999999999999e-05, + "loss": 2.3177, + "step": 600 + }, + { + "epoch": 0.3879922530664945, + "grad_norm": 1.9372339947006005, + "learning_rate": 9.014999999999998e-05, + "loss": 2.6678, + "step": 601 + }, + { + "epoch": 0.38863783085861847, + "grad_norm": 1.9438809130858807, + "learning_rate": 9.029999999999999e-05, + "loss": 2.7238, + "step": 602 + }, + { + "epoch": 0.3892834086507424, + "grad_norm": 2.18986483665489, + "learning_rate": 9.044999999999999e-05, + "loss": 2.7854, + "step": 603 + }, + { + "epoch": 0.3899289864428664, + "grad_norm": 2.168320494397116, + "learning_rate": 9.059999999999999e-05, + "loss": 2.9744, + "step": 604 + }, + { + "epoch": 0.3905745642349903, + "grad_norm": 2.0141838432536976, + "learning_rate": 9.074999999999998e-05, + "loss": 2.7219, + "step": 605 + }, + { + "epoch": 0.39122014202711425, + "grad_norm": 2.114826618931071, + "learning_rate": 9.089999999999999e-05, + "loss": 2.2354, + "step": 606 + }, + { + "epoch": 0.39186571981923823, + "grad_norm": 2.4075524832077346, + "learning_rate": 9.104999999999999e-05, + "loss": 2.7243, + "step": 607 + }, + { + "epoch": 0.39251129761136216, + "grad_norm": 2.650667964646802, + "learning_rate": 9.12e-05, + "loss": 2.7561, + "step": 608 + }, + { + "epoch": 0.3931568754034861, + "grad_norm": 1.9534406731171776, + "learning_rate": 9.134999999999998e-05, + "loss": 2.8165, + "step": 609 + }, + { + "epoch": 0.3938024531956101, + "grad_norm": 2.096443828479125, + "learning_rate": 9.149999999999999e-05, + "loss": 2.7106, + "step": 610 + }, + { + "epoch": 0.394448030987734, + "grad_norm": 2.4710173795972423, + "learning_rate": 9.164999999999999e-05, + "loss": 2.7987, + "step": 611 + }, + { + "epoch": 0.395093608779858, + "grad_norm": 1.6377460365265566, + "learning_rate": 9.18e-05, + "loss": 2.5893, + "step": 612 + }, + { + "epoch": 0.3957391865719819, + "grad_norm": 1.8813042216264513, + "learning_rate": 9.194999999999999e-05, + "loss": 2.6183, + "step": 613 + }, + { + "epoch": 0.39638476436410586, + "grad_norm": 1.8798368483161683, + "learning_rate": 9.209999999999999e-05, + "loss": 2.6051, + "step": 614 + }, + { + "epoch": 0.39703034215622984, + "grad_norm": 1.7859269532783162, + "learning_rate": 9.224999999999999e-05, + "loss": 2.592, + "step": 615 + }, + { + "epoch": 0.3976759199483538, + "grad_norm": 1.8783046788033968, + "learning_rate": 9.24e-05, + "loss": 2.6741, + "step": 616 + }, + { + "epoch": 0.3983214977404777, + "grad_norm": 1.7726499941225138, + "learning_rate": 9.254999999999999e-05, + "loss": 2.772, + "step": 617 + }, 
+ { + "epoch": 0.3989670755326017, + "grad_norm": 1.8950284831384214, + "learning_rate": 9.269999999999999e-05, + "loss": 2.7593, + "step": 618 + }, + { + "epoch": 0.3996126533247256, + "grad_norm": 2.236602498463451, + "learning_rate": 9.285e-05, + "loss": 2.6592, + "step": 619 + }, + { + "epoch": 0.4002582311168496, + "grad_norm": 1.9657914551027618, + "learning_rate": 9.3e-05, + "loss": 2.8155, + "step": 620 + }, + { + "epoch": 0.40090380890897354, + "grad_norm": 1.6215919417432174, + "learning_rate": 9.314999999999999e-05, + "loss": 2.628, + "step": 621 + }, + { + "epoch": 0.40154938670109747, + "grad_norm": 1.939853479314672, + "learning_rate": 9.329999999999999e-05, + "loss": 2.574, + "step": 622 + }, + { + "epoch": 0.40219496449322145, + "grad_norm": 1.9590095491416857, + "learning_rate": 9.345e-05, + "loss": 2.5356, + "step": 623 + }, + { + "epoch": 0.4028405422853454, + "grad_norm": 2.285347960920685, + "learning_rate": 9.36e-05, + "loss": 2.74, + "step": 624 + }, + { + "epoch": 0.4034861200774693, + "grad_norm": 2.200741425388686, + "learning_rate": 9.374999999999999e-05, + "loss": 2.7572, + "step": 625 + }, + { + "epoch": 0.4041316978695933, + "grad_norm": 1.9496510097583681, + "learning_rate": 9.389999999999999e-05, + "loss": 2.305, + "step": 626 + }, + { + "epoch": 0.40477727566171723, + "grad_norm": 2.210340214334422, + "learning_rate": 9.405e-05, + "loss": 2.4845, + "step": 627 + }, + { + "epoch": 0.4054228534538412, + "grad_norm": 2.0603885705677514, + "learning_rate": 9.419999999999999e-05, + "loss": 2.7494, + "step": 628 + }, + { + "epoch": 0.40606843124596514, + "grad_norm": 2.1829748113487883, + "learning_rate": 9.434999999999999e-05, + "loss": 2.645, + "step": 629 + }, + { + "epoch": 0.4067140090380891, + "grad_norm": 2.408942779485642, + "learning_rate": 9.449999999999999e-05, + "loss": 2.5289, + "step": 630 + }, + { + "epoch": 0.40735958683021306, + "grad_norm": 1.9997614644117945, + "learning_rate": 9.465e-05, + "loss": 2.9772, + "step": 631 + }, + { + "epoch": 0.408005164622337, + "grad_norm": 1.595413236746543, + "learning_rate": 9.479999999999999e-05, + "loss": 2.6691, + "step": 632 + }, + { + "epoch": 0.4086507424144609, + "grad_norm": 1.7734920535136007, + "learning_rate": 9.494999999999999e-05, + "loss": 2.7065, + "step": 633 + }, + { + "epoch": 0.4092963202065849, + "grad_norm": 1.884762215089833, + "learning_rate": 9.51e-05, + "loss": 2.7652, + "step": 634 + }, + { + "epoch": 0.40994189799870884, + "grad_norm": 1.8271005655917905, + "learning_rate": 9.525e-05, + "loss": 2.6945, + "step": 635 + }, + { + "epoch": 0.41058747579083277, + "grad_norm": 1.8031153540643454, + "learning_rate": 9.539999999999999e-05, + "loss": 2.6493, + "step": 636 + }, + { + "epoch": 0.41123305358295675, + "grad_norm": 1.7023676125456713, + "learning_rate": 9.554999999999999e-05, + "loss": 2.7589, + "step": 637 + }, + { + "epoch": 0.4118786313750807, + "grad_norm": 2.0174937309319514, + "learning_rate": 9.57e-05, + "loss": 2.6449, + "step": 638 + }, + { + "epoch": 0.41252420916720467, + "grad_norm": 2.0813851738326563, + "learning_rate": 9.585e-05, + "loss": 2.3256, + "step": 639 + }, + { + "epoch": 0.4131697869593286, + "grad_norm": 1.9425303357820496, + "learning_rate": 9.599999999999999e-05, + "loss": 2.9369, + "step": 640 + }, + { + "epoch": 0.41381536475145253, + "grad_norm": 2.1581495917187556, + "learning_rate": 9.614999999999999e-05, + "loss": 3.0882, + "step": 641 + }, + { + "epoch": 0.4144609425435765, + "grad_norm": 1.921544555073821, + "learning_rate": 9.63e-05, + "loss": 
2.7634, + "step": 642 + }, + { + "epoch": 0.41510652033570045, + "grad_norm": 1.6952596708031344, + "learning_rate": 9.645e-05, + "loss": 2.2052, + "step": 643 + }, + { + "epoch": 0.4157520981278244, + "grad_norm": 1.8584881942458775, + "learning_rate": 9.659999999999999e-05, + "loss": 2.1695, + "step": 644 + }, + { + "epoch": 0.41639767591994836, + "grad_norm": 2.004757178414477, + "learning_rate": 9.675e-05, + "loss": 2.6507, + "step": 645 + }, + { + "epoch": 0.4170432537120723, + "grad_norm": 2.0064791873189525, + "learning_rate": 9.69e-05, + "loss": 2.8162, + "step": 646 + }, + { + "epoch": 0.4176888315041963, + "grad_norm": 1.847494163917081, + "learning_rate": 9.705e-05, + "loss": 2.6888, + "step": 647 + }, + { + "epoch": 0.4183344092963202, + "grad_norm": 1.8522019691573082, + "learning_rate": 9.719999999999999e-05, + "loss": 2.8591, + "step": 648 + }, + { + "epoch": 0.41897998708844414, + "grad_norm": 1.8061082872918734, + "learning_rate": 9.735e-05, + "loss": 2.335, + "step": 649 + }, + { + "epoch": 0.4196255648805681, + "grad_norm": 1.944944161753651, + "learning_rate": 9.75e-05, + "loss": 2.7004, + "step": 650 + }, + { + "epoch": 0.42027114267269206, + "grad_norm": 1.764641656079715, + "learning_rate": 9.764999999999999e-05, + "loss": 2.3325, + "step": 651 + }, + { + "epoch": 0.420916720464816, + "grad_norm": 3.093838676961851, + "learning_rate": 9.779999999999999e-05, + "loss": 2.8569, + "step": 652 + }, + { + "epoch": 0.42156229825693997, + "grad_norm": 1.8244533083777421, + "learning_rate": 9.795e-05, + "loss": 2.6603, + "step": 653 + }, + { + "epoch": 0.4222078760490639, + "grad_norm": 1.7494285076620568, + "learning_rate": 9.81e-05, + "loss": 2.5363, + "step": 654 + }, + { + "epoch": 0.4228534538411879, + "grad_norm": 2.2007266815217896, + "learning_rate": 9.824999999999999e-05, + "loss": 2.573, + "step": 655 + }, + { + "epoch": 0.4234990316333118, + "grad_norm": 2.000605582138396, + "learning_rate": 9.839999999999999e-05, + "loss": 2.4862, + "step": 656 + }, + { + "epoch": 0.42414460942543575, + "grad_norm": 2.1075510287477877, + "learning_rate": 9.855e-05, + "loss": 2.7832, + "step": 657 + }, + { + "epoch": 0.42479018721755973, + "grad_norm": 1.7809119930629878, + "learning_rate": 9.87e-05, + "loss": 2.7209, + "step": 658 + }, + { + "epoch": 0.42543576500968366, + "grad_norm": 1.9651309187621708, + "learning_rate": 9.884999999999999e-05, + "loss": 2.3267, + "step": 659 + }, + { + "epoch": 0.4260813428018076, + "grad_norm": 2.0171727799642207, + "learning_rate": 9.9e-05, + "loss": 2.8087, + "step": 660 + }, + { + "epoch": 0.4267269205939316, + "grad_norm": 1.8447661837425744, + "learning_rate": 9.915e-05, + "loss": 2.7178, + "step": 661 + }, + { + "epoch": 0.4273724983860555, + "grad_norm": 1.8822702028644263, + "learning_rate": 9.93e-05, + "loss": 2.3746, + "step": 662 + }, + { + "epoch": 0.4280180761781795, + "grad_norm": 1.977760841682139, + "learning_rate": 9.944999999999999e-05, + "loss": 2.7794, + "step": 663 + }, + { + "epoch": 0.4286636539703034, + "grad_norm": 2.2299306951610434, + "learning_rate": 9.96e-05, + "loss": 2.5672, + "step": 664 + }, + { + "epoch": 0.42930923176242736, + "grad_norm": 1.7457737597764509, + "learning_rate": 9.975e-05, + "loss": 2.2048, + "step": 665 + }, + { + "epoch": 0.42995480955455134, + "grad_norm": 1.706504314682569, + "learning_rate": 9.99e-05, + "loss": 2.4817, + "step": 666 + }, + { + "epoch": 0.4306003873466753, + "grad_norm": 2.9169741777551788, + "learning_rate": 0.00010004999999999999, + "loss": 2.9027, + "step": 667 + }, + { 
+ "epoch": 0.4312459651387992, + "grad_norm": 1.8424423367200775, + "learning_rate": 0.0001002, + "loss": 2.5383, + "step": 668 + }, + { + "epoch": 0.4318915429309232, + "grad_norm": 2.4451368558472995, + "learning_rate": 0.00010035, + "loss": 2.5494, + "step": 669 + }, + { + "epoch": 0.4325371207230471, + "grad_norm": 1.9362127841820618, + "learning_rate": 0.0001005, + "loss": 2.6485, + "step": 670 + }, + { + "epoch": 0.4331826985151711, + "grad_norm": 2.443264521833662, + "learning_rate": 0.00010065, + "loss": 2.746, + "step": 671 + }, + { + "epoch": 0.43382827630729504, + "grad_norm": 1.7027989716820697, + "learning_rate": 0.0001008, + "loss": 2.6318, + "step": 672 + }, + { + "epoch": 0.43447385409941897, + "grad_norm": 1.7545770665786746, + "learning_rate": 0.00010095, + "loss": 2.9019, + "step": 673 + }, + { + "epoch": 0.43511943189154295, + "grad_norm": 1.7910826013009322, + "learning_rate": 0.0001011, + "loss": 2.8922, + "step": 674 + }, + { + "epoch": 0.4357650096836669, + "grad_norm": 1.6302430221440305, + "learning_rate": 0.00010125, + "loss": 2.6433, + "step": 675 + }, + { + "epoch": 0.4364105874757908, + "grad_norm": 1.7383914840838062, + "learning_rate": 0.0001014, + "loss": 2.7021, + "step": 676 + }, + { + "epoch": 0.4370561652679148, + "grad_norm": 1.6669186546373884, + "learning_rate": 0.00010155, + "loss": 2.7564, + "step": 677 + }, + { + "epoch": 0.43770174306003873, + "grad_norm": 1.8523155696381406, + "learning_rate": 0.00010169999999999999, + "loss": 2.6962, + "step": 678 + }, + { + "epoch": 0.43834732085216266, + "grad_norm": 1.6313560811407315, + "learning_rate": 0.00010185, + "loss": 2.5167, + "step": 679 + }, + { + "epoch": 0.43899289864428664, + "grad_norm": 1.8686542391933163, + "learning_rate": 0.000102, + "loss": 2.7275, + "step": 680 + }, + { + "epoch": 0.4396384764364106, + "grad_norm": 1.9887253774685407, + "learning_rate": 0.00010215, + "loss": 2.9657, + "step": 681 + }, + { + "epoch": 0.44028405422853456, + "grad_norm": 3.0641267475004317, + "learning_rate": 0.00010229999999999999, + "loss": 2.5893, + "step": 682 + }, + { + "epoch": 0.4409296320206585, + "grad_norm": 2.544589377398497, + "learning_rate": 0.00010245, + "loss": 2.7487, + "step": 683 + }, + { + "epoch": 0.4415752098127824, + "grad_norm": 1.947682264301289, + "learning_rate": 0.0001026, + "loss": 2.7859, + "step": 684 + }, + { + "epoch": 0.4422207876049064, + "grad_norm": 1.7323990405253153, + "learning_rate": 0.00010275, + "loss": 2.6526, + "step": 685 + }, + { + "epoch": 0.44286636539703034, + "grad_norm": 1.7640805203423051, + "learning_rate": 0.0001029, + "loss": 2.239, + "step": 686 + }, + { + "epoch": 0.44351194318915427, + "grad_norm": 1.9264148141886144, + "learning_rate": 0.00010305, + "loss": 2.9157, + "step": 687 + }, + { + "epoch": 0.44415752098127825, + "grad_norm": 1.8569007395470465, + "learning_rate": 0.00010319999999999999, + "loss": 2.7504, + "step": 688 + }, + { + "epoch": 0.4448030987734022, + "grad_norm": 2.0398540715151, + "learning_rate": 0.00010334999999999998, + "loss": 2.9822, + "step": 689 + }, + { + "epoch": 0.44544867656552617, + "grad_norm": 1.7710519998600716, + "learning_rate": 0.00010349999999999998, + "loss": 2.6649, + "step": 690 + }, + { + "epoch": 0.4460942543576501, + "grad_norm": 1.6703152309311113, + "learning_rate": 0.00010364999999999999, + "loss": 2.3173, + "step": 691 + }, + { + "epoch": 0.44673983214977403, + "grad_norm": 1.758374172739052, + "learning_rate": 0.00010379999999999999, + "loss": 2.7417, + "step": 692 + }, + { + "epoch": 
0.447385409941898, + "grad_norm": 1.744442063450311, + "learning_rate": 0.00010394999999999998, + "loss": 2.7966, + "step": 693 + }, + { + "epoch": 0.44803098773402195, + "grad_norm": 2.075545088320079, + "learning_rate": 0.00010409999999999998, + "loss": 2.9282, + "step": 694 + }, + { + "epoch": 0.4486765655261459, + "grad_norm": 1.6198298504130906, + "learning_rate": 0.00010424999999999999, + "loss": 2.3483, + "step": 695 + }, + { + "epoch": 0.44932214331826986, + "grad_norm": 1.7433116328804503, + "learning_rate": 0.00010439999999999999, + "loss": 2.558, + "step": 696 + }, + { + "epoch": 0.4499677211103938, + "grad_norm": 1.4553932702187764, + "learning_rate": 0.00010454999999999998, + "loss": 2.2407, + "step": 697 + }, + { + "epoch": 0.4506132989025178, + "grad_norm": 1.8099075911895386, + "learning_rate": 0.00010469999999999998, + "loss": 2.6754, + "step": 698 + }, + { + "epoch": 0.4512588766946417, + "grad_norm": 1.8488292180473327, + "learning_rate": 0.00010484999999999999, + "loss": 2.677, + "step": 699 + }, + { + "epoch": 0.45190445448676564, + "grad_norm": 1.74583236552727, + "learning_rate": 0.00010499999999999999, + "loss": 2.5874, + "step": 700 + }, + { + "epoch": 0.4525500322788896, + "grad_norm": 1.7278304175422037, + "learning_rate": 0.00010514999999999998, + "loss": 2.4917, + "step": 701 + }, + { + "epoch": 0.45319561007101355, + "grad_norm": 2.1492932115048293, + "learning_rate": 0.00010529999999999998, + "loss": 2.9098, + "step": 702 + }, + { + "epoch": 0.4538411878631375, + "grad_norm": 1.7730680431354051, + "learning_rate": 0.00010544999999999999, + "loss": 2.8378, + "step": 703 + }, + { + "epoch": 0.45448676565526147, + "grad_norm": 1.596856366736407, + "learning_rate": 0.00010559999999999998, + "loss": 2.8892, + "step": 704 + }, + { + "epoch": 0.4551323434473854, + "grad_norm": 1.817523202201856, + "learning_rate": 0.00010574999999999998, + "loss": 2.6785, + "step": 705 + }, + { + "epoch": 0.4557779212395094, + "grad_norm": 3.298495522820333, + "learning_rate": 0.00010589999999999999, + "loss": 2.4051, + "step": 706 + }, + { + "epoch": 0.4564234990316333, + "grad_norm": 2.0250260307206207, + "learning_rate": 0.00010604999999999999, + "loss": 2.5606, + "step": 707 + }, + { + "epoch": 0.45706907682375725, + "grad_norm": 2.0877187560888513, + "learning_rate": 0.00010619999999999998, + "loss": 2.7408, + "step": 708 + }, + { + "epoch": 0.45771465461588123, + "grad_norm": 2.000039839843237, + "learning_rate": 0.00010634999999999998, + "loss": 2.9311, + "step": 709 + }, + { + "epoch": 0.45836023240800516, + "grad_norm": 2.118462058786124, + "learning_rate": 0.00010649999999999999, + "loss": 2.6935, + "step": 710 + }, + { + "epoch": 0.4590058102001291, + "grad_norm": 1.9186721809149634, + "learning_rate": 0.00010664999999999999, + "loss": 2.2237, + "step": 711 + }, + { + "epoch": 0.4596513879922531, + "grad_norm": 1.9870644075112482, + "learning_rate": 0.00010679999999999998, + "loss": 2.5015, + "step": 712 + }, + { + "epoch": 0.460296965784377, + "grad_norm": 1.8580917790736249, + "learning_rate": 0.00010694999999999998, + "loss": 2.6493, + "step": 713 + }, + { + "epoch": 0.460942543576501, + "grad_norm": 1.6910322206897994, + "learning_rate": 0.00010709999999999999, + "loss": 2.2033, + "step": 714 + }, + { + "epoch": 0.4615881213686249, + "grad_norm": 2.309718132666112, + "learning_rate": 0.00010724999999999999, + "loss": 2.7156, + "step": 715 + }, + { + "epoch": 0.46223369916074886, + "grad_norm": 2.173010367140145, + "learning_rate": 0.00010739999999999998, + "loss": 
2.6773, + "step": 716 + }, + { + "epoch": 0.46287927695287284, + "grad_norm": 1.7361958049129245, + "learning_rate": 0.00010754999999999999, + "loss": 2.8215, + "step": 717 + }, + { + "epoch": 0.4635248547449968, + "grad_norm": 2.0025838624922456, + "learning_rate": 0.00010769999999999999, + "loss": 2.6651, + "step": 718 + }, + { + "epoch": 0.4641704325371207, + "grad_norm": 1.7688255342683195, + "learning_rate": 0.00010784999999999999, + "loss": 2.3457, + "step": 719 + }, + { + "epoch": 0.4648160103292447, + "grad_norm": 1.784301990463857, + "learning_rate": 0.00010799999999999998, + "loss": 2.2777, + "step": 720 + }, + { + "epoch": 0.4654615881213686, + "grad_norm": 2.6798256048326916, + "learning_rate": 0.00010814999999999999, + "loss": 3.0707, + "step": 721 + }, + { + "epoch": 0.46610716591349255, + "grad_norm": 1.6934330506568656, + "learning_rate": 0.00010829999999999999, + "loss": 2.5493, + "step": 722 + }, + { + "epoch": 0.46675274370561654, + "grad_norm": 2.1801550629079056, + "learning_rate": 0.00010845, + "loss": 2.6502, + "step": 723 + }, + { + "epoch": 0.46739832149774047, + "grad_norm": 1.8478769604231835, + "learning_rate": 0.00010859999999999998, + "loss": 2.6467, + "step": 724 + }, + { + "epoch": 0.46804389928986445, + "grad_norm": 1.8144004584180584, + "learning_rate": 0.00010874999999999999, + "loss": 2.6004, + "step": 725 + }, + { + "epoch": 0.4686894770819884, + "grad_norm": 1.6669236525005728, + "learning_rate": 0.00010889999999999999, + "loss": 2.701, + "step": 726 + }, + { + "epoch": 0.4693350548741123, + "grad_norm": 1.521608037146397, + "learning_rate": 0.00010904999999999998, + "loss": 2.6477, + "step": 727 + }, + { + "epoch": 0.4699806326662363, + "grad_norm": 1.7281185355323214, + "learning_rate": 0.00010919999999999998, + "loss": 2.5244, + "step": 728 + }, + { + "epoch": 0.47062621045836023, + "grad_norm": 1.9049202399068565, + "learning_rate": 0.00010934999999999999, + "loss": 2.9117, + "step": 729 + }, + { + "epoch": 0.47127178825048416, + "grad_norm": 1.7510202418725118, + "learning_rate": 0.00010949999999999999, + "loss": 2.2644, + "step": 730 + }, + { + "epoch": 0.47191736604260814, + "grad_norm": 1.6169278267531209, + "learning_rate": 0.00010964999999999998, + "loss": 2.5378, + "step": 731 + }, + { + "epoch": 0.4725629438347321, + "grad_norm": 1.8253998311335986, + "learning_rate": 0.00010979999999999999, + "loss": 2.8713, + "step": 732 + }, + { + "epoch": 0.47320852162685606, + "grad_norm": 1.7624981148102696, + "learning_rate": 0.00010994999999999999, + "loss": 2.7222, + "step": 733 + }, + { + "epoch": 0.47385409941898, + "grad_norm": 1.726006009967735, + "learning_rate": 0.00011009999999999999, + "loss": 2.6366, + "step": 734 + }, + { + "epoch": 0.4744996772111039, + "grad_norm": 1.6784179170813645, + "learning_rate": 0.00011024999999999998, + "loss": 2.2836, + "step": 735 + }, + { + "epoch": 0.4751452550032279, + "grad_norm": 1.8955219988115697, + "learning_rate": 0.00011039999999999999, + "loss": 2.7563, + "step": 736 + }, + { + "epoch": 0.47579083279535184, + "grad_norm": 1.8158425616527474, + "learning_rate": 0.00011054999999999999, + "loss": 2.7556, + "step": 737 + }, + { + "epoch": 0.47643641058747577, + "grad_norm": 1.681098909127252, + "learning_rate": 0.0001107, + "loss": 2.6078, + "step": 738 + }, + { + "epoch": 0.47708198837959975, + "grad_norm": 1.821303174937621, + "learning_rate": 0.00011084999999999998, + "loss": 2.38, + "step": 739 + }, + { + "epoch": 0.4777275661717237, + "grad_norm": 1.8780086078913352, + "learning_rate": 
0.00011099999999999999, + "loss": 2.6456, + "step": 740 + }, + { + "epoch": 0.47837314396384767, + "grad_norm": 1.8443114148113362, + "learning_rate": 0.00011114999999999999, + "loss": 2.7543, + "step": 741 + }, + { + "epoch": 0.4790187217559716, + "grad_norm": 1.8585523366534837, + "learning_rate": 0.0001113, + "loss": 2.7582, + "step": 742 + }, + { + "epoch": 0.47966429954809553, + "grad_norm": 1.6163729209345974, + "learning_rate": 0.00011144999999999998, + "loss": 2.7225, + "step": 743 + }, + { + "epoch": 0.4803098773402195, + "grad_norm": 1.6825773223048284, + "learning_rate": 0.00011159999999999999, + "loss": 2.5912, + "step": 744 + }, + { + "epoch": 0.48095545513234345, + "grad_norm": 1.6671517801198956, + "learning_rate": 0.00011174999999999999, + "loss": 2.6675, + "step": 745 + }, + { + "epoch": 0.4816010329244674, + "grad_norm": 1.7229467322916734, + "learning_rate": 0.0001119, + "loss": 2.7966, + "step": 746 + }, + { + "epoch": 0.48224661071659136, + "grad_norm": 1.7032107400482, + "learning_rate": 0.00011204999999999999, + "loss": 2.5527, + "step": 747 + }, + { + "epoch": 0.4828921885087153, + "grad_norm": 1.7142078479342544, + "learning_rate": 0.00011219999999999999, + "loss": 2.8434, + "step": 748 + }, + { + "epoch": 0.4835377663008393, + "grad_norm": 1.6377581612680674, + "learning_rate": 0.00011235, + "loss": 2.7951, + "step": 749 + }, + { + "epoch": 0.4841833440929632, + "grad_norm": 1.5840384058283647, + "learning_rate": 0.0001125, + "loss": 2.3922, + "step": 750 + }, + { + "epoch": 0.48482892188508714, + "grad_norm": 1.6025725113760794, + "learning_rate": 0.00011264999999999999, + "loss": 2.5933, + "step": 751 + }, + { + "epoch": 0.4854744996772111, + "grad_norm": 1.7557804685841265, + "learning_rate": 0.00011279999999999999, + "loss": 2.6053, + "step": 752 + }, + { + "epoch": 0.48612007746933505, + "grad_norm": 1.906132436531714, + "learning_rate": 0.00011295, + "loss": 2.6447, + "step": 753 + }, + { + "epoch": 0.486765655261459, + "grad_norm": 1.7243332152849902, + "learning_rate": 0.00011309999999999998, + "loss": 2.7109, + "step": 754 + }, + { + "epoch": 0.48741123305358297, + "grad_norm": 1.6466705750185195, + "learning_rate": 0.00011324999999999999, + "loss": 2.6173, + "step": 755 + }, + { + "epoch": 0.4880568108457069, + "grad_norm": 1.9001179161414, + "learning_rate": 0.00011339999999999999, + "loss": 2.9063, + "step": 756 + }, + { + "epoch": 0.48870238863783083, + "grad_norm": 1.7677135136794684, + "learning_rate": 0.00011355, + "loss": 2.7034, + "step": 757 + }, + { + "epoch": 0.4893479664299548, + "grad_norm": 1.5556317322382922, + "learning_rate": 0.00011369999999999999, + "loss": 2.2938, + "step": 758 + }, + { + "epoch": 0.48999354422207875, + "grad_norm": 1.5602847633559327, + "learning_rate": 0.00011384999999999999, + "loss": 2.5058, + "step": 759 + }, + { + "epoch": 0.49063912201420273, + "grad_norm": 1.583416747882427, + "learning_rate": 0.00011399999999999999, + "loss": 2.562, + "step": 760 + }, + { + "epoch": 0.49128469980632666, + "grad_norm": 1.7393539481731235, + "learning_rate": 0.00011415, + "loss": 2.7561, + "step": 761 + }, + { + "epoch": 0.4919302775984506, + "grad_norm": 1.8279660649415965, + "learning_rate": 0.00011429999999999999, + "loss": 2.2557, + "step": 762 + }, + { + "epoch": 0.4925758553905746, + "grad_norm": 1.8701434867801539, + "learning_rate": 0.00011444999999999999, + "loss": 2.6487, + "step": 763 + }, + { + "epoch": 0.4932214331826985, + "grad_norm": 1.7537899884685872, + "learning_rate": 0.0001146, + "loss": 2.6469, + "step": 
764 + }, + { + "epoch": 0.49386701097482244, + "grad_norm": 1.6494599831528551, + "learning_rate": 0.00011475, + "loss": 2.6908, + "step": 765 + }, + { + "epoch": 0.4945125887669464, + "grad_norm": 1.6341117699556564, + "learning_rate": 0.00011489999999999999, + "loss": 2.7727, + "step": 766 + }, + { + "epoch": 0.49515816655907036, + "grad_norm": 1.9517119561204352, + "learning_rate": 0.00011504999999999999, + "loss": 2.9139, + "step": 767 + }, + { + "epoch": 0.49580374435119434, + "grad_norm": 1.6057128700237562, + "learning_rate": 0.0001152, + "loss": 2.6245, + "step": 768 + }, + { + "epoch": 0.4964493221433183, + "grad_norm": 1.608991136889918, + "learning_rate": 0.00011535, + "loss": 2.5673, + "step": 769 + }, + { + "epoch": 0.4970948999354422, + "grad_norm": 1.6013265971311197, + "learning_rate": 0.00011549999999999999, + "loss": 2.5942, + "step": 770 + }, + { + "epoch": 0.4977404777275662, + "grad_norm": 1.7728466478693656, + "learning_rate": 0.00011564999999999999, + "loss": 2.5179, + "step": 771 + }, + { + "epoch": 0.4983860555196901, + "grad_norm": 1.6395605745892459, + "learning_rate": 0.0001158, + "loss": 2.6409, + "step": 772 + }, + { + "epoch": 0.49903163331181405, + "grad_norm": 1.8384907233290346, + "learning_rate": 0.00011595, + "loss": 2.5998, + "step": 773 + }, + { + "epoch": 0.49967721110393803, + "grad_norm": 1.6941066957543256, + "learning_rate": 0.00011609999999999999, + "loss": 2.7217, + "step": 774 + }, + { + "epoch": 0.500322788896062, + "grad_norm": 1.6900248143428558, + "learning_rate": 0.00011624999999999999, + "loss": 2.8201, + "step": 775 + }, + { + "epoch": 0.5009683666881859, + "grad_norm": 1.5733827194016023, + "learning_rate": 0.0001164, + "loss": 2.4814, + "step": 776 + }, + { + "epoch": 0.5016139444803098, + "grad_norm": 1.9925697964933522, + "learning_rate": 0.00011654999999999999, + "loss": 2.7803, + "step": 777 + }, + { + "epoch": 0.5022595222724339, + "grad_norm": 1.7184695553486675, + "learning_rate": 0.00011669999999999999, + "loss": 2.56, + "step": 778 + }, + { + "epoch": 0.5029051000645578, + "grad_norm": 1.739435938045543, + "learning_rate": 0.00011685, + "loss": 2.7095, + "step": 779 + }, + { + "epoch": 0.5035506778566817, + "grad_norm": 1.5727019766296693, + "learning_rate": 0.000117, + "loss": 2.5739, + "step": 780 + }, + { + "epoch": 0.5041962556488057, + "grad_norm": 1.7505147956040508, + "learning_rate": 0.00011714999999999999, + "loss": 2.6611, + "step": 781 + }, + { + "epoch": 0.5048418334409296, + "grad_norm": 1.6709132462667822, + "learning_rate": 0.00011729999999999999, + "loss": 2.6945, + "step": 782 + }, + { + "epoch": 0.5054874112330536, + "grad_norm": 1.7950134965990898, + "learning_rate": 0.00011745, + "loss": 2.7567, + "step": 783 + }, + { + "epoch": 0.5061329890251776, + "grad_norm": 1.7070450894767644, + "learning_rate": 0.0001176, + "loss": 2.7467, + "step": 784 + }, + { + "epoch": 0.5067785668173015, + "grad_norm": 1.5119844016619008, + "learning_rate": 0.00011774999999999999, + "loss": 2.6955, + "step": 785 + }, + { + "epoch": 0.5074241446094254, + "grad_norm": 1.506742622856106, + "learning_rate": 0.00011789999999999999, + "loss": 2.6756, + "step": 786 + }, + { + "epoch": 0.5080697224015494, + "grad_norm": 2.0744164738121107, + "learning_rate": 0.00011805, + "loss": 2.7071, + "step": 787 + }, + { + "epoch": 0.5087153001936734, + "grad_norm": 1.6304878649864591, + "learning_rate": 0.0001182, + "loss": 2.638, + "step": 788 + }, + { + "epoch": 0.5093608779857973, + "grad_norm": 1.5795639343323595, + "learning_rate": 
0.00011834999999999999, + "loss": 2.7533, + "step": 789 + }, + { + "epoch": 0.5100064557779213, + "grad_norm": 1.5477984454638232, + "learning_rate": 0.0001185, + "loss": 2.6171, + "step": 790 + }, + { + "epoch": 0.5106520335700452, + "grad_norm": 1.6757395556948282, + "learning_rate": 0.00011865, + "loss": 2.5415, + "step": 791 + }, + { + "epoch": 0.5112976113621691, + "grad_norm": 1.680450506077003, + "learning_rate": 0.0001188, + "loss": 2.7366, + "step": 792 + }, + { + "epoch": 0.511943189154293, + "grad_norm": 1.683810360953093, + "learning_rate": 0.00011894999999999999, + "loss": 2.6091, + "step": 793 + }, + { + "epoch": 0.5125887669464171, + "grad_norm": 1.6364016905573067, + "learning_rate": 0.0001191, + "loss": 2.7486, + "step": 794 + }, + { + "epoch": 0.513234344738541, + "grad_norm": 1.7538231845267327, + "learning_rate": 0.00011925, + "loss": 2.5802, + "step": 795 + }, + { + "epoch": 0.513879922530665, + "grad_norm": 1.8897151077600696, + "learning_rate": 0.0001194, + "loss": 2.8539, + "step": 796 + }, + { + "epoch": 0.5145255003227889, + "grad_norm": 1.631248319095047, + "learning_rate": 0.00011954999999999999, + "loss": 2.5636, + "step": 797 + }, + { + "epoch": 0.5151710781149128, + "grad_norm": 1.6455547996447053, + "learning_rate": 0.0001197, + "loss": 2.4759, + "step": 798 + }, + { + "epoch": 0.5158166559070368, + "grad_norm": 1.7603678514144165, + "learning_rate": 0.00011985, + "loss": 2.5864, + "step": 799 + }, + { + "epoch": 0.5164622336991608, + "grad_norm": 1.7453257067171424, + "learning_rate": 0.00011999999999999999, + "loss": 2.6443, + "step": 800 + }, + { + "epoch": 0.5171078114912847, + "grad_norm": 1.647392230818976, + "learning_rate": 0.00012014999999999999, + "loss": 2.6949, + "step": 801 + }, + { + "epoch": 0.5177533892834086, + "grad_norm": 1.711358956686134, + "learning_rate": 0.0001203, + "loss": 2.7317, + "step": 802 + }, + { + "epoch": 0.5183989670755326, + "grad_norm": 1.685530170973663, + "learning_rate": 0.00012045, + "loss": 2.2193, + "step": 803 + }, + { + "epoch": 0.5190445448676565, + "grad_norm": 1.6422445277572062, + "learning_rate": 0.00012059999999999999, + "loss": 2.5555, + "step": 804 + }, + { + "epoch": 0.5196901226597805, + "grad_norm": 1.5701886241027716, + "learning_rate": 0.00012075, + "loss": 2.6172, + "step": 805 + }, + { + "epoch": 0.5203357004519045, + "grad_norm": 1.6769054450497616, + "learning_rate": 0.0001209, + "loss": 2.8786, + "step": 806 + }, + { + "epoch": 0.5209812782440284, + "grad_norm": 1.7385378588137814, + "learning_rate": 0.00012105, + "loss": 2.6595, + "step": 807 + }, + { + "epoch": 0.5216268560361523, + "grad_norm": 1.6187628143471462, + "learning_rate": 0.00012119999999999999, + "loss": 2.6391, + "step": 808 + }, + { + "epoch": 0.5222724338282763, + "grad_norm": 1.5710234955493796, + "learning_rate": 0.00012135, + "loss": 2.8818, + "step": 809 + }, + { + "epoch": 0.5229180116204003, + "grad_norm": 1.545489335668764, + "learning_rate": 0.0001215, + "loss": 2.6935, + "step": 810 + }, + { + "epoch": 0.5235635894125242, + "grad_norm": 1.539342581001415, + "learning_rate": 0.00012165, + "loss": 2.5546, + "step": 811 + }, + { + "epoch": 0.5242091672046482, + "grad_norm": 1.826811835310975, + "learning_rate": 0.00012179999999999999, + "loss": 2.7232, + "step": 812 + }, + { + "epoch": 0.5248547449967721, + "grad_norm": 1.5604584174208405, + "learning_rate": 0.00012194999999999998, + "loss": 2.5741, + "step": 813 + }, + { + "epoch": 0.525500322788896, + "grad_norm": 1.6216431671383447, + "learning_rate": 
0.00012209999999999999, + "loss": 2.3206, + "step": 814 + }, + { + "epoch": 0.5261459005810201, + "grad_norm": 1.5384241169128043, + "learning_rate": 0.00012225, + "loss": 2.7503, + "step": 815 + }, + { + "epoch": 0.526791478373144, + "grad_norm": 1.4063237571849672, + "learning_rate": 0.0001224, + "loss": 2.1323, + "step": 816 + }, + { + "epoch": 0.5274370561652679, + "grad_norm": 1.5819751168165743, + "learning_rate": 0.00012254999999999997, + "loss": 2.5583, + "step": 817 + }, + { + "epoch": 0.5280826339573919, + "grad_norm": 1.555871879116111, + "learning_rate": 0.00012269999999999997, + "loss": 2.1716, + "step": 818 + }, + { + "epoch": 0.5287282117495158, + "grad_norm": 1.9701544359924197, + "learning_rate": 0.00012284999999999998, + "loss": 2.7643, + "step": 819 + }, + { + "epoch": 0.5293737895416397, + "grad_norm": 1.653061475349606, + "learning_rate": 0.00012299999999999998, + "loss": 2.5749, + "step": 820 + }, + { + "epoch": 0.5300193673337638, + "grad_norm": 1.6159636672989834, + "learning_rate": 0.00012314999999999998, + "loss": 2.7058, + "step": 821 + }, + { + "epoch": 0.5306649451258877, + "grad_norm": 1.512589979993954, + "learning_rate": 0.0001233, + "loss": 2.5379, + "step": 822 + }, + { + "epoch": 0.5313105229180116, + "grad_norm": 1.46452544633425, + "learning_rate": 0.00012345, + "loss": 2.6603, + "step": 823 + }, + { + "epoch": 0.5319561007101355, + "grad_norm": 1.745913351119141, + "learning_rate": 0.0001236, + "loss": 2.5524, + "step": 824 + }, + { + "epoch": 0.5326016785022595, + "grad_norm": 1.618701452096863, + "learning_rate": 0.00012374999999999997, + "loss": 2.529, + "step": 825 + }, + { + "epoch": 0.5332472562943835, + "grad_norm": 1.7262581997813364, + "learning_rate": 0.00012389999999999998, + "loss": 2.6038, + "step": 826 + }, + { + "epoch": 0.5338928340865075, + "grad_norm": 1.747103298288265, + "learning_rate": 0.00012404999999999998, + "loss": 2.5405, + "step": 827 + }, + { + "epoch": 0.5345384118786314, + "grad_norm": 1.6719912361412865, + "learning_rate": 0.00012419999999999998, + "loss": 2.8464, + "step": 828 + }, + { + "epoch": 0.5351839896707553, + "grad_norm": 1.6380568791171368, + "learning_rate": 0.00012435, + "loss": 2.5627, + "step": 829 + }, + { + "epoch": 0.5358295674628792, + "grad_norm": 1.5712447901101845, + "learning_rate": 0.0001245, + "loss": 2.6354, + "step": 830 + }, + { + "epoch": 0.5364751452550033, + "grad_norm": 1.6072623113113458, + "learning_rate": 0.00012465, + "loss": 2.6815, + "step": 831 + }, + { + "epoch": 0.5371207230471272, + "grad_norm": 1.6916780136353669, + "learning_rate": 0.00012479999999999997, + "loss": 2.8679, + "step": 832 + }, + { + "epoch": 0.5377663008392511, + "grad_norm": 2.5584431812323136, + "learning_rate": 0.00012494999999999997, + "loss": 2.6059, + "step": 833 + }, + { + "epoch": 0.5384118786313751, + "grad_norm": 1.564979854950134, + "learning_rate": 0.00012509999999999998, + "loss": 2.427, + "step": 834 + }, + { + "epoch": 0.539057456423499, + "grad_norm": 2.2170988926905077, + "learning_rate": 0.00012524999999999998, + "loss": 2.7143, + "step": 835 + }, + { + "epoch": 0.5397030342156229, + "grad_norm": 1.5584573730863072, + "learning_rate": 0.00012539999999999999, + "loss": 2.6927, + "step": 836 + }, + { + "epoch": 0.540348612007747, + "grad_norm": 1.6343382722202735, + "learning_rate": 0.00012555, + "loss": 2.5505, + "step": 837 + }, + { + "epoch": 0.5409941897998709, + "grad_norm": 1.6493446040190092, + "learning_rate": 0.0001257, + "loss": 2.5789, + "step": 838 + }, + { + "epoch": 
0.5416397675919948, + "grad_norm": 1.6116237170097356, + "learning_rate": 0.00012585, + "loss": 2.1655, + "step": 839 + }, + { + "epoch": 0.5422853453841188, + "grad_norm": 1.5572044853742515, + "learning_rate": 0.00012599999999999997, + "loss": 2.5538, + "step": 840 + }, + { + "epoch": 0.5429309231762427, + "grad_norm": 1.5889802713872758, + "learning_rate": 0.00012614999999999998, + "loss": 2.4014, + "step": 841 + }, + { + "epoch": 0.5435765009683667, + "grad_norm": 1.807286750106585, + "learning_rate": 0.00012629999999999998, + "loss": 2.4062, + "step": 842 + }, + { + "epoch": 0.5442220787604907, + "grad_norm": 1.4691540425109675, + "learning_rate": 0.00012644999999999998, + "loss": 2.2214, + "step": 843 + }, + { + "epoch": 0.5448676565526146, + "grad_norm": 1.5115236031909682, + "learning_rate": 0.0001266, + "loss": 2.7622, + "step": 844 + }, + { + "epoch": 0.5455132343447385, + "grad_norm": 1.8341943063323602, + "learning_rate": 0.00012675, + "loss": 2.2462, + "step": 845 + }, + { + "epoch": 0.5461588121368625, + "grad_norm": 1.6665912572684731, + "learning_rate": 0.0001269, + "loss": 2.7263, + "step": 846 + }, + { + "epoch": 0.5468043899289864, + "grad_norm": 1.6554027303545011, + "learning_rate": 0.00012705, + "loss": 2.4898, + "step": 847 + }, + { + "epoch": 0.5474499677211104, + "grad_norm": 1.495300046304717, + "learning_rate": 0.00012719999999999997, + "loss": 2.6689, + "step": 848 + }, + { + "epoch": 0.5480955455132344, + "grad_norm": 1.8333560875341781, + "learning_rate": 0.00012734999999999998, + "loss": 2.8533, + "step": 849 + }, + { + "epoch": 0.5487411233053583, + "grad_norm": 1.7127502560945422, + "learning_rate": 0.00012749999999999998, + "loss": 2.3615, + "step": 850 + }, + { + "epoch": 0.5493867010974822, + "grad_norm": 1.5629221742050174, + "learning_rate": 0.00012764999999999999, + "loss": 2.539, + "step": 851 + }, + { + "epoch": 0.5500322788896062, + "grad_norm": 1.6475620816451761, + "learning_rate": 0.0001278, + "loss": 2.6028, + "step": 852 + }, + { + "epoch": 0.5506778566817302, + "grad_norm": 1.814788576717076, + "learning_rate": 0.00012795, + "loss": 2.8993, + "step": 853 + }, + { + "epoch": 0.5513234344738541, + "grad_norm": 1.6090894155785864, + "learning_rate": 0.0001281, + "loss": 2.6576, + "step": 854 + }, + { + "epoch": 0.551969012265978, + "grad_norm": 1.486602610838201, + "learning_rate": 0.00012824999999999997, + "loss": 2.4454, + "step": 855 + }, + { + "epoch": 0.552614590058102, + "grad_norm": 1.6884049879857186, + "learning_rate": 0.00012839999999999998, + "loss": 2.7625, + "step": 856 + }, + { + "epoch": 0.5532601678502259, + "grad_norm": 1.4927243305971576, + "learning_rate": 0.00012854999999999998, + "loss": 2.3102, + "step": 857 + }, + { + "epoch": 0.55390574564235, + "grad_norm": 1.608512762141428, + "learning_rate": 0.00012869999999999998, + "loss": 2.0426, + "step": 858 + }, + { + "epoch": 0.5545513234344739, + "grad_norm": 1.7566552896362981, + "learning_rate": 0.00012885, + "loss": 2.8751, + "step": 859 + }, + { + "epoch": 0.5551969012265978, + "grad_norm": 1.630593350182867, + "learning_rate": 0.000129, + "loss": 2.6667, + "step": 860 + }, + { + "epoch": 0.5558424790187217, + "grad_norm": 1.4312292702533291, + "learning_rate": 0.00012915, + "loss": 2.2717, + "step": 861 + }, + { + "epoch": 0.5564880568108457, + "grad_norm": 1.5981301956987934, + "learning_rate": 0.0001293, + "loss": 2.4677, + "step": 862 + }, + { + "epoch": 0.5571336346029696, + "grad_norm": 1.525500131066523, + "learning_rate": 0.00012944999999999998, + "loss": 2.3836, + 
"step": 863 + }, + { + "epoch": 0.5577792123950936, + "grad_norm": 1.7126507113256495, + "learning_rate": 0.00012959999999999998, + "loss": 2.7017, + "step": 864 + }, + { + "epoch": 0.5584247901872176, + "grad_norm": 1.6270676904296588, + "learning_rate": 0.00012974999999999998, + "loss": 2.6487, + "step": 865 + }, + { + "epoch": 0.5590703679793415, + "grad_norm": 1.5531544312741736, + "learning_rate": 0.00012989999999999999, + "loss": 2.577, + "step": 866 + }, + { + "epoch": 0.5597159457714654, + "grad_norm": 1.7556637898232372, + "learning_rate": 0.00013005, + "loss": 2.6679, + "step": 867 + }, + { + "epoch": 0.5603615235635894, + "grad_norm": 1.9096576723088026, + "learning_rate": 0.0001302, + "loss": 2.7424, + "step": 868 + }, + { + "epoch": 0.5610071013557134, + "grad_norm": 1.5427752654755646, + "learning_rate": 0.00013035, + "loss": 2.6345, + "step": 869 + }, + { + "epoch": 0.5616526791478373, + "grad_norm": 1.7105830063940555, + "learning_rate": 0.0001305, + "loss": 2.6482, + "step": 870 + }, + { + "epoch": 0.5622982569399613, + "grad_norm": 1.796286870935815, + "learning_rate": 0.00013064999999999998, + "loss": 2.2732, + "step": 871 + }, + { + "epoch": 0.5629438347320852, + "grad_norm": 3.226750230091171, + "learning_rate": 0.00013079999999999998, + "loss": 2.6392, + "step": 872 + }, + { + "epoch": 0.5635894125242091, + "grad_norm": 1.7254308927105284, + "learning_rate": 0.00013094999999999998, + "loss": 2.5402, + "step": 873 + }, + { + "epoch": 0.5642349903163331, + "grad_norm": 2.0433485399038833, + "learning_rate": 0.0001311, + "loss": 2.5538, + "step": 874 + }, + { + "epoch": 0.5648805681084571, + "grad_norm": 1.5946667608109044, + "learning_rate": 0.00013125, + "loss": 2.5645, + "step": 875 + }, + { + "epoch": 0.565526145900581, + "grad_norm": 1.8867735034199387, + "learning_rate": 0.0001314, + "loss": 2.5179, + "step": 876 + }, + { + "epoch": 0.566171723692705, + "grad_norm": 1.738211511584942, + "learning_rate": 0.00013155, + "loss": 2.6168, + "step": 877 + }, + { + "epoch": 0.5668173014848289, + "grad_norm": 1.774095464965515, + "learning_rate": 0.00013169999999999998, + "loss": 2.5787, + "step": 878 + }, + { + "epoch": 0.5674628792769528, + "grad_norm": 1.8920136341779146, + "learning_rate": 0.00013184999999999998, + "loss": 2.6517, + "step": 879 + }, + { + "epoch": 0.5681084570690769, + "grad_norm": 2.2451112281322896, + "learning_rate": 0.00013199999999999998, + "loss": 2.5672, + "step": 880 + }, + { + "epoch": 0.5687540348612008, + "grad_norm": 1.49591521999586, + "learning_rate": 0.00013215, + "loss": 2.4474, + "step": 881 + }, + { + "epoch": 0.5693996126533247, + "grad_norm": 1.61010931157016, + "learning_rate": 0.0001323, + "loss": 2.683, + "step": 882 + }, + { + "epoch": 0.5700451904454487, + "grad_norm": 1.4688358791912628, + "learning_rate": 0.00013245, + "loss": 2.5202, + "step": 883 + }, + { + "epoch": 0.5706907682375726, + "grad_norm": 1.784749512971316, + "learning_rate": 0.0001326, + "loss": 2.7647, + "step": 884 + }, + { + "epoch": 0.5713363460296966, + "grad_norm": 1.6275206600172611, + "learning_rate": 0.00013275, + "loss": 2.6727, + "step": 885 + }, + { + "epoch": 0.5719819238218206, + "grad_norm": 1.5046180297883816, + "learning_rate": 0.00013289999999999998, + "loss": 2.5693, + "step": 886 + }, + { + "epoch": 0.5726275016139445, + "grad_norm": 1.4643296082293384, + "learning_rate": 0.00013304999999999998, + "loss": 2.6698, + "step": 887 + }, + { + "epoch": 0.5732730794060684, + "grad_norm": 1.9502428828404614, + "learning_rate": 0.00013319999999999999, 
+ "loss": 2.6144, + "step": 888 + }, + { + "epoch": 0.5739186571981924, + "grad_norm": 1.5333416144700966, + "learning_rate": 0.00013335, + "loss": 2.1469, + "step": 889 + }, + { + "epoch": 0.5745642349903163, + "grad_norm": 1.4489755112382607, + "learning_rate": 0.0001335, + "loss": 2.7234, + "step": 890 + }, + { + "epoch": 0.5752098127824403, + "grad_norm": 1.8250390650648611, + "learning_rate": 0.00013365, + "loss": 2.6251, + "step": 891 + }, + { + "epoch": 0.5758553905745643, + "grad_norm": 1.6913537867112776, + "learning_rate": 0.0001338, + "loss": 2.629, + "step": 892 + }, + { + "epoch": 0.5765009683666882, + "grad_norm": 1.9507964243586777, + "learning_rate": 0.00013395, + "loss": 2.541, + "step": 893 + }, + { + "epoch": 0.5771465461588121, + "grad_norm": 2.099707269607654, + "learning_rate": 0.00013409999999999998, + "loss": 2.3994, + "step": 894 + }, + { + "epoch": 0.577792123950936, + "grad_norm": 1.993123602049088, + "learning_rate": 0.00013424999999999998, + "loss": 2.5771, + "step": 895 + }, + { + "epoch": 0.5784377017430601, + "grad_norm": 1.8177675508253615, + "learning_rate": 0.0001344, + "loss": 2.6501, + "step": 896 + }, + { + "epoch": 0.579083279535184, + "grad_norm": 1.6288888095118923, + "learning_rate": 0.00013455, + "loss": 2.5682, + "step": 897 + }, + { + "epoch": 0.579728857327308, + "grad_norm": 1.5784105237482897, + "learning_rate": 0.0001347, + "loss": 2.5827, + "step": 898 + }, + { + "epoch": 0.5803744351194319, + "grad_norm": 1.572219394371304, + "learning_rate": 0.00013485, + "loss": 2.6137, + "step": 899 + }, + { + "epoch": 0.5810200129115558, + "grad_norm": 2.072043042979017, + "learning_rate": 0.000135, + "loss": 2.689, + "step": 900 + }, + { + "epoch": 0.5816655907036798, + "grad_norm": 2.2408837048035535, + "learning_rate": 0.00013514999999999998, + "loss": 2.8257, + "step": 901 + }, + { + "epoch": 0.5823111684958038, + "grad_norm": 2.282522481756746, + "learning_rate": 0.00013529999999999998, + "loss": 2.3572, + "step": 902 + }, + { + "epoch": 0.5829567462879277, + "grad_norm": 1.8910146935799939, + "learning_rate": 0.00013544999999999999, + "loss": 2.5922, + "step": 903 + }, + { + "epoch": 0.5836023240800516, + "grad_norm": 2.8959859745454546, + "learning_rate": 0.0001356, + "loss": 2.2256, + "step": 904 + }, + { + "epoch": 0.5842479018721756, + "grad_norm": 4.860773282200199, + "learning_rate": 0.00013575, + "loss": 2.6958, + "step": 905 + }, + { + "epoch": 0.5848934796642995, + "grad_norm": 1.7324551915626587, + "learning_rate": 0.0001359, + "loss": 2.6219, + "step": 906 + }, + { + "epoch": 0.5855390574564235, + "grad_norm": 1.588646106885282, + "learning_rate": 0.00013605, + "loss": 2.8384, + "step": 907 + }, + { + "epoch": 0.5861846352485475, + "grad_norm": 1.5644242252150375, + "learning_rate": 0.0001362, + "loss": 2.8132, + "step": 908 + }, + { + "epoch": 0.5868302130406714, + "grad_norm": 1.4327842493764165, + "learning_rate": 0.00013634999999999998, + "loss": 2.6545, + "step": 909 + }, + { + "epoch": 0.5874757908327953, + "grad_norm": 1.6368478886093698, + "learning_rate": 0.00013649999999999998, + "loss": 2.7984, + "step": 910 + }, + { + "epoch": 0.5881213686249193, + "grad_norm": 1.474221381986217, + "learning_rate": 0.00013665, + "loss": 2.7855, + "step": 911 + }, + { + "epoch": 0.5887669464170433, + "grad_norm": 1.5221625655929418, + "learning_rate": 0.0001368, + "loss": 2.6027, + "step": 912 + }, + { + "epoch": 0.5894125242091672, + "grad_norm": 1.4675920231969608, + "learning_rate": 0.00013695, + "loss": 2.7567, + "step": 913 + }, + { + 
"epoch": 0.5900581020012912, + "grad_norm": 1.3226425344712833, + "learning_rate": 0.0001371, + "loss": 2.5716, + "step": 914 + }, + { + "epoch": 0.5907036797934151, + "grad_norm": 1.524419868011999, + "learning_rate": 0.00013725, + "loss": 2.6934, + "step": 915 + }, + { + "epoch": 0.591349257585539, + "grad_norm": 1.5397195509852741, + "learning_rate": 0.0001374, + "loss": 2.6473, + "step": 916 + }, + { + "epoch": 0.591994835377663, + "grad_norm": 1.4912901480122243, + "learning_rate": 0.00013754999999999998, + "loss": 2.5091, + "step": 917 + }, + { + "epoch": 0.592640413169787, + "grad_norm": 1.478611794730723, + "learning_rate": 0.00013769999999999999, + "loss": 2.5864, + "step": 918 + }, + { + "epoch": 0.5932859909619109, + "grad_norm": 2.0705823249838597, + "learning_rate": 0.00013785, + "loss": 2.892, + "step": 919 + }, + { + "epoch": 0.5939315687540349, + "grad_norm": 1.651535081099624, + "learning_rate": 0.000138, + "loss": 2.503, + "step": 920 + }, + { + "epoch": 0.5945771465461588, + "grad_norm": 1.9431469861679453, + "learning_rate": 0.00013815, + "loss": 2.6066, + "step": 921 + }, + { + "epoch": 0.5952227243382827, + "grad_norm": 1.5303282954497104, + "learning_rate": 0.0001383, + "loss": 2.6315, + "step": 922 + }, + { + "epoch": 0.5958683021304068, + "grad_norm": 1.472120417094415, + "learning_rate": 0.00013845, + "loss": 2.4941, + "step": 923 + }, + { + "epoch": 0.5965138799225307, + "grad_norm": 1.4529950898812152, + "learning_rate": 0.0001386, + "loss": 2.5341, + "step": 924 + }, + { + "epoch": 0.5971594577146546, + "grad_norm": 1.3745688805081913, + "learning_rate": 0.00013874999999999998, + "loss": 2.5924, + "step": 925 + }, + { + "epoch": 0.5978050355067785, + "grad_norm": 1.4984594870116321, + "learning_rate": 0.0001389, + "loss": 2.6547, + "step": 926 + }, + { + "epoch": 0.5984506132989025, + "grad_norm": 1.4622344550525024, + "learning_rate": 0.00013905, + "loss": 2.5145, + "step": 927 + }, + { + "epoch": 0.5990961910910265, + "grad_norm": 1.4170648742572216, + "learning_rate": 0.0001392, + "loss": 2.0986, + "step": 928 + }, + { + "epoch": 0.5997417688831505, + "grad_norm": 1.6557146866011252, + "learning_rate": 0.00013935, + "loss": 2.1055, + "step": 929 + }, + { + "epoch": 0.6003873466752744, + "grad_norm": 1.562326153417595, + "learning_rate": 0.0001395, + "loss": 2.4491, + "step": 930 + }, + { + "epoch": 0.6010329244673983, + "grad_norm": 1.5086303116607984, + "learning_rate": 0.00013965, + "loss": 2.3952, + "step": 931 + }, + { + "epoch": 0.6016785022595222, + "grad_norm": 1.7142211799179978, + "learning_rate": 0.00013979999999999998, + "loss": 2.8341, + "step": 932 + }, + { + "epoch": 0.6023240800516462, + "grad_norm": 1.5656649673865084, + "learning_rate": 0.00013995, + "loss": 2.665, + "step": 933 + }, + { + "epoch": 0.6029696578437702, + "grad_norm": 1.4654262483504206, + "learning_rate": 0.0001401, + "loss": 2.0851, + "step": 934 + }, + { + "epoch": 0.6036152356358941, + "grad_norm": 1.3724452509804574, + "learning_rate": 0.00014025, + "loss": 2.5638, + "step": 935 + }, + { + "epoch": 0.6042608134280181, + "grad_norm": 1.4115945843641564, + "learning_rate": 0.0001404, + "loss": 2.0842, + "step": 936 + }, + { + "epoch": 0.604906391220142, + "grad_norm": 1.5655252416662686, + "learning_rate": 0.00014055, + "loss": 2.7325, + "step": 937 + }, + { + "epoch": 0.6055519690122659, + "grad_norm": 1.478034844411789, + "learning_rate": 0.00014069999999999998, + "loss": 2.5737, + "step": 938 + }, + { + "epoch": 0.60619754680439, + "grad_norm": 1.5860713897668384, + 
"learning_rate": 0.00014084999999999998, + "loss": 2.704, + "step": 939 + }, + { + "epoch": 0.6068431245965139, + "grad_norm": 1.5561134793037639, + "learning_rate": 0.00014099999999999998, + "loss": 2.1313, + "step": 940 + }, + { + "epoch": 0.6074887023886378, + "grad_norm": 1.4683348328190204, + "learning_rate": 0.00014115, + "loss": 2.5773, + "step": 941 + }, + { + "epoch": 0.6081342801807618, + "grad_norm": 1.7362815828922002, + "learning_rate": 0.0001413, + "loss": 2.7055, + "step": 942 + }, + { + "epoch": 0.6087798579728857, + "grad_norm": 1.420761143200216, + "learning_rate": 0.00014144999999999997, + "loss": 2.5862, + "step": 943 + }, + { + "epoch": 0.6094254357650097, + "grad_norm": 1.8799988479750251, + "learning_rate": 0.00014159999999999997, + "loss": 2.4732, + "step": 944 + }, + { + "epoch": 0.6100710135571337, + "grad_norm": 1.636574688796385, + "learning_rate": 0.00014174999999999998, + "loss": 2.6449, + "step": 945 + }, + { + "epoch": 0.6107165913492576, + "grad_norm": 1.599473511378371, + "learning_rate": 0.00014189999999999998, + "loss": 2.2384, + "step": 946 + }, + { + "epoch": 0.6113621691413815, + "grad_norm": 1.4480367835367636, + "learning_rate": 0.00014204999999999998, + "loss": 2.4844, + "step": 947 + }, + { + "epoch": 0.6120077469335055, + "grad_norm": 1.5503640097018134, + "learning_rate": 0.0001422, + "loss": 2.7293, + "step": 948 + }, + { + "epoch": 0.6126533247256294, + "grad_norm": 1.6389663340140415, + "learning_rate": 0.00014235, + "loss": 2.6298, + "step": 949 + }, + { + "epoch": 0.6132989025177534, + "grad_norm": 1.4604302233979627, + "learning_rate": 0.0001425, + "loss": 2.6141, + "step": 950 + }, + { + "epoch": 0.6139444803098774, + "grad_norm": 1.4849461719405401, + "learning_rate": 0.00014264999999999997, + "loss": 2.5067, + "step": 951 + }, + { + "epoch": 0.6145900581020013, + "grad_norm": 1.482582422151832, + "learning_rate": 0.00014279999999999997, + "loss": 2.6957, + "step": 952 + }, + { + "epoch": 0.6152356358941252, + "grad_norm": 1.866999958505833, + "learning_rate": 0.00014294999999999998, + "loss": 2.5278, + "step": 953 + }, + { + "epoch": 0.6158812136862492, + "grad_norm": 1.5031959763787779, + "learning_rate": 0.00014309999999999998, + "loss": 2.6727, + "step": 954 + }, + { + "epoch": 0.6165267914783732, + "grad_norm": 1.4150894943522414, + "learning_rate": 0.00014324999999999999, + "loss": 2.5723, + "step": 955 + }, + { + "epoch": 0.6171723692704971, + "grad_norm": 1.4880433192168536, + "learning_rate": 0.0001434, + "loss": 2.6676, + "step": 956 + }, + { + "epoch": 0.617817947062621, + "grad_norm": 1.3453639164856155, + "learning_rate": 0.00014355, + "loss": 2.3937, + "step": 957 + }, + { + "epoch": 0.618463524854745, + "grad_norm": 1.4824191789593215, + "learning_rate": 0.00014369999999999997, + "loss": 2.476, + "step": 958 + }, + { + "epoch": 0.6191091026468689, + "grad_norm": 1.6595203250053865, + "learning_rate": 0.00014384999999999997, + "loss": 2.3042, + "step": 959 + }, + { + "epoch": 0.6197546804389928, + "grad_norm": 1.6171571894777512, + "learning_rate": 0.00014399999999999998, + "loss": 2.7038, + "step": 960 + }, + { + "epoch": 0.6204002582311169, + "grad_norm": 1.6467701902855174, + "learning_rate": 0.00014414999999999998, + "loss": 2.7463, + "step": 961 + }, + { + "epoch": 0.6210458360232408, + "grad_norm": 1.4680027007346246, + "learning_rate": 0.00014429999999999998, + "loss": 2.5151, + "step": 962 + }, + { + "epoch": 0.6216914138153647, + "grad_norm": 1.6785422277433135, + "learning_rate": 0.00014445, + "loss": 2.6788, + 
"step": 963 + }, + { + "epoch": 0.6223369916074887, + "grad_norm": 1.5789222478149059, + "learning_rate": 0.0001446, + "loss": 2.6628, + "step": 964 + }, + { + "epoch": 0.6229825693996126, + "grad_norm": 1.5329014351982546, + "learning_rate": 0.00014475, + "loss": 2.863, + "step": 965 + }, + { + "epoch": 0.6236281471917366, + "grad_norm": 1.2812495949037326, + "learning_rate": 0.00014489999999999997, + "loss": 2.4028, + "step": 966 + }, + { + "epoch": 0.6242737249838606, + "grad_norm": 1.5254816512448814, + "learning_rate": 0.00014504999999999997, + "loss": 2.7684, + "step": 967 + }, + { + "epoch": 0.6249193027759845, + "grad_norm": 1.42810636352982, + "learning_rate": 0.00014519999999999998, + "loss": 2.5714, + "step": 968 + }, + { + "epoch": 0.6255648805681084, + "grad_norm": 1.421179439537124, + "learning_rate": 0.00014534999999999998, + "loss": 2.5357, + "step": 969 + }, + { + "epoch": 0.6262104583602324, + "grad_norm": 1.5195185897771348, + "learning_rate": 0.00014549999999999999, + "loss": 2.6164, + "step": 970 + }, + { + "epoch": 0.6268560361523564, + "grad_norm": 1.7033524326045306, + "learning_rate": 0.00014565, + "loss": 2.3925, + "step": 971 + }, + { + "epoch": 0.6275016139444803, + "grad_norm": 1.5449676868177316, + "learning_rate": 0.0001458, + "loss": 2.6497, + "step": 972 + }, + { + "epoch": 0.6281471917366043, + "grad_norm": 1.6354725997543236, + "learning_rate": 0.00014595, + "loss": 2.7503, + "step": 973 + }, + { + "epoch": 0.6287927695287282, + "grad_norm": 1.477233445726324, + "learning_rate": 0.00014609999999999997, + "loss": 2.7024, + "step": 974 + }, + { + "epoch": 0.6294383473208521, + "grad_norm": 1.5999591073260822, + "learning_rate": 0.00014624999999999998, + "loss": 2.8854, + "step": 975 + }, + { + "epoch": 0.6300839251129761, + "grad_norm": 1.530098775459813, + "learning_rate": 0.00014639999999999998, + "loss": 2.4108, + "step": 976 + }, + { + "epoch": 0.6307295029051001, + "grad_norm": 1.5320900663234376, + "learning_rate": 0.00014654999999999998, + "loss": 2.3788, + "step": 977 + }, + { + "epoch": 0.631375080697224, + "grad_norm": 1.66011463202699, + "learning_rate": 0.0001467, + "loss": 2.1751, + "step": 978 + }, + { + "epoch": 0.632020658489348, + "grad_norm": 1.6942431522179178, + "learning_rate": 0.00014685, + "loss": 2.4144, + "step": 979 + }, + { + "epoch": 0.6326662362814719, + "grad_norm": 1.6503084844480815, + "learning_rate": 0.000147, + "loss": 2.6989, + "step": 980 + }, + { + "epoch": 0.6333118140735958, + "grad_norm": 1.6310126707422212, + "learning_rate": 0.00014714999999999997, + "loss": 2.6084, + "step": 981 + }, + { + "epoch": 0.6339573918657199, + "grad_norm": 1.36236563372741, + "learning_rate": 0.00014729999999999998, + "loss": 2.4679, + "step": 982 + }, + { + "epoch": 0.6346029696578438, + "grad_norm": 1.4132177345374923, + "learning_rate": 0.00014744999999999998, + "loss": 2.3945, + "step": 983 + }, + { + "epoch": 0.6352485474499677, + "grad_norm": 1.7029225870704505, + "learning_rate": 0.00014759999999999998, + "loss": 2.3505, + "step": 984 + }, + { + "epoch": 0.6358941252420917, + "grad_norm": 1.4348412389092664, + "learning_rate": 0.00014774999999999999, + "loss": 2.5678, + "step": 985 + }, + { + "epoch": 0.6365397030342156, + "grad_norm": 1.8370574676701863, + "learning_rate": 0.0001479, + "loss": 2.112, + "step": 986 + }, + { + "epoch": 0.6371852808263396, + "grad_norm": 1.3861999423987625, + "learning_rate": 0.00014805, + "loss": 2.7491, + "step": 987 + }, + { + "epoch": 0.6378308586184636, + "grad_norm": 1.4556511512282255, + 
"learning_rate": 0.0001482, + "loss": 2.6284, + "step": 988 + }, + { + "epoch": 0.6384764364105875, + "grad_norm": 1.5690377131884252, + "learning_rate": 0.00014834999999999997, + "loss": 2.6638, + "step": 989 + }, + { + "epoch": 0.6391220142027114, + "grad_norm": 1.6351443737153915, + "learning_rate": 0.00014849999999999998, + "loss": 2.4736, + "step": 990 + }, + { + "epoch": 0.6397675919948353, + "grad_norm": 1.7816960703421758, + "learning_rate": 0.00014864999999999998, + "loss": 2.6229, + "step": 991 + }, + { + "epoch": 0.6404131697869593, + "grad_norm": 1.5213287598226686, + "learning_rate": 0.00014879999999999998, + "loss": 2.5948, + "step": 992 + }, + { + "epoch": 0.6410587475790833, + "grad_norm": 2.328133448885884, + "learning_rate": 0.00014895, + "loss": 2.8192, + "step": 993 + }, + { + "epoch": 0.6417043253712073, + "grad_norm": 1.5916721248726284, + "learning_rate": 0.0001491, + "loss": 2.4613, + "step": 994 + }, + { + "epoch": 0.6423499031633312, + "grad_norm": 1.554876756636239, + "learning_rate": 0.00014925, + "loss": 2.4572, + "step": 995 + }, + { + "epoch": 0.6429954809554551, + "grad_norm": 1.8154025556010778, + "learning_rate": 0.0001494, + "loss": 2.7926, + "step": 996 + }, + { + "epoch": 0.643641058747579, + "grad_norm": 1.6682824823747753, + "learning_rate": 0.00014954999999999998, + "loss": 2.447, + "step": 997 + }, + { + "epoch": 0.6442866365397031, + "grad_norm": 1.4614461909604333, + "learning_rate": 0.00014969999999999998, + "loss": 2.0542, + "step": 998 + }, + { + "epoch": 0.644932214331827, + "grad_norm": 1.4619460608342236, + "learning_rate": 0.00014984999999999998, + "loss": 2.51, + "step": 999 + }, + { + "epoch": 0.6455777921239509, + "grad_norm": 1.4179790017525018, + "learning_rate": 0.00015, + "loss": 2.6067, + "step": 1000 + }, + { + "epoch": 0.6462233699160749, + "grad_norm": 1.5811447288880827, + "learning_rate": 0.00015014999999999996, + "loss": 2.6139, + "step": 1001 + }, + { + "epoch": 0.6468689477081988, + "grad_norm": 1.5441680415128871, + "learning_rate": 0.0001503, + "loss": 2.6141, + "step": 1002 + }, + { + "epoch": 0.6475145255003227, + "grad_norm": 1.4171877032454558, + "learning_rate": 0.00015044999999999997, + "loss": 2.5068, + "step": 1003 + }, + { + "epoch": 0.6481601032924468, + "grad_norm": 1.5178874623978862, + "learning_rate": 0.00015059999999999997, + "loss": 2.7492, + "step": 1004 + }, + { + "epoch": 0.6488056810845707, + "grad_norm": 1.3355959002130051, + "learning_rate": 0.00015074999999999998, + "loss": 2.5945, + "step": 1005 + }, + { + "epoch": 0.6494512588766946, + "grad_norm": 1.596004484174637, + "learning_rate": 0.00015089999999999998, + "loss": 2.73, + "step": 1006 + }, + { + "epoch": 0.6500968366688186, + "grad_norm": 1.3172944893881589, + "learning_rate": 0.00015104999999999996, + "loss": 2.7353, + "step": 1007 + }, + { + "epoch": 0.6507424144609425, + "grad_norm": 1.3070084767452488, + "learning_rate": 0.0001512, + "loss": 2.3843, + "step": 1008 + }, + { + "epoch": 0.6513879922530665, + "grad_norm": 1.4901909392311166, + "learning_rate": 0.00015134999999999997, + "loss": 2.424, + "step": 1009 + }, + { + "epoch": 0.6520335700451905, + "grad_norm": 1.6767143927903108, + "learning_rate": 0.0001515, + "loss": 2.813, + "step": 1010 + }, + { + "epoch": 0.6526791478373144, + "grad_norm": 1.5330578014414058, + "learning_rate": 0.00015164999999999997, + "loss": 2.6285, + "step": 1011 + }, + { + "epoch": 0.6533247256294383, + "grad_norm": 1.5161694166200577, + "learning_rate": 0.00015179999999999998, + "loss": 2.5408, + "step": 
1012 + }, + { + "epoch": 0.6539703034215623, + "grad_norm": 1.5307992554561822, + "learning_rate": 0.00015194999999999998, + "loss": 2.2569, + "step": 1013 + }, + { + "epoch": 0.6546158812136863, + "grad_norm": 1.3922175290990226, + "learning_rate": 0.00015209999999999998, + "loss": 2.7015, + "step": 1014 + }, + { + "epoch": 0.6552614590058102, + "grad_norm": 1.6368105147621195, + "learning_rate": 0.00015224999999999996, + "loss": 2.8194, + "step": 1015 + }, + { + "epoch": 0.6559070367979342, + "grad_norm": 1.465225890376384, + "learning_rate": 0.0001524, + "loss": 2.6412, + "step": 1016 + }, + { + "epoch": 0.6565526145900581, + "grad_norm": 1.546696604259728, + "learning_rate": 0.00015254999999999997, + "loss": 2.6429, + "step": 1017 + }, + { + "epoch": 0.657198192382182, + "grad_norm": 1.5832090151561296, + "learning_rate": 0.0001527, + "loss": 2.7857, + "step": 1018 + }, + { + "epoch": 0.657843770174306, + "grad_norm": 1.5252398780705374, + "learning_rate": 0.00015284999999999997, + "loss": 2.7171, + "step": 1019 + }, + { + "epoch": 0.65848934796643, + "grad_norm": 1.8379332665373227, + "learning_rate": 0.00015299999999999998, + "loss": 2.8059, + "step": 1020 + }, + { + "epoch": 0.6591349257585539, + "grad_norm": 1.715475101166195, + "learning_rate": 0.00015314999999999998, + "loss": 2.6878, + "step": 1021 + }, + { + "epoch": 0.6597805035506779, + "grad_norm": 1.5149291423571445, + "learning_rate": 0.00015329999999999999, + "loss": 2.4694, + "step": 1022 + }, + { + "epoch": 0.6604260813428018, + "grad_norm": 1.5800930117142487, + "learning_rate": 0.00015344999999999996, + "loss": 2.6562, + "step": 1023 + }, + { + "epoch": 0.6610716591349257, + "grad_norm": 1.5682323452393245, + "learning_rate": 0.0001536, + "loss": 2.6092, + "step": 1024 + }, + { + "epoch": 0.6617172369270498, + "grad_norm": 1.4460582856487574, + "learning_rate": 0.00015374999999999997, + "loss": 2.7517, + "step": 1025 + }, + { + "epoch": 0.6623628147191737, + "grad_norm": 1.397611626655054, + "learning_rate": 0.0001539, + "loss": 2.507, + "step": 1026 + }, + { + "epoch": 0.6630083925112976, + "grad_norm": 1.4600113054365087, + "learning_rate": 0.00015404999999999998, + "loss": 2.6369, + "step": 1027 + }, + { + "epoch": 0.6636539703034215, + "grad_norm": 1.6336327023813024, + "learning_rate": 0.00015419999999999998, + "loss": 2.4436, + "step": 1028 + }, + { + "epoch": 0.6642995480955455, + "grad_norm": 1.4254276588831332, + "learning_rate": 0.00015434999999999998, + "loss": 2.5261, + "step": 1029 + }, + { + "epoch": 0.6649451258876695, + "grad_norm": 1.6894687838455849, + "learning_rate": 0.0001545, + "loss": 2.5536, + "step": 1030 + }, + { + "epoch": 0.6655907036797934, + "grad_norm": 1.5548001137409915, + "learning_rate": 0.00015464999999999996, + "loss": 2.605, + "step": 1031 + }, + { + "epoch": 0.6662362814719174, + "grad_norm": 1.5843338274406609, + "learning_rate": 0.0001548, + "loss": 2.4725, + "step": 1032 + }, + { + "epoch": 0.6668818592640413, + "grad_norm": 1.486573661798984, + "learning_rate": 0.00015494999999999997, + "loss": 2.5512, + "step": 1033 + }, + { + "epoch": 0.6675274370561652, + "grad_norm": 1.6229393235544125, + "learning_rate": 0.0001551, + "loss": 2.2764, + "step": 1034 + }, + { + "epoch": 0.6681730148482892, + "grad_norm": 1.6285227368990518, + "learning_rate": 0.00015524999999999998, + "loss": 2.5886, + "step": 1035 + }, + { + "epoch": 0.6688185926404132, + "grad_norm": 1.4752053389321038, + "learning_rate": 0.00015539999999999998, + "loss": 2.3949, + "step": 1036 + }, + { + "epoch": 
0.6694641704325371, + "grad_norm": 1.7264169987925089, + "learning_rate": 0.00015554999999999999, + "loss": 2.6335, + "step": 1037 + }, + { + "epoch": 0.6701097482246611, + "grad_norm": 1.5637417339644175, + "learning_rate": 0.0001557, + "loss": 2.8016, + "step": 1038 + }, + { + "epoch": 0.670755326016785, + "grad_norm": 1.4214349055752726, + "learning_rate": 0.00015584999999999997, + "loss": 2.6531, + "step": 1039 + }, + { + "epoch": 0.6714009038089089, + "grad_norm": 1.3693851089428366, + "learning_rate": 0.000156, + "loss": 2.3298, + "step": 1040 + }, + { + "epoch": 0.672046481601033, + "grad_norm": 2.322981997231504, + "learning_rate": 0.00015614999999999997, + "loss": 2.5378, + "step": 1041 + }, + { + "epoch": 0.6726920593931569, + "grad_norm": 1.4174773994224672, + "learning_rate": 0.0001563, + "loss": 2.5053, + "step": 1042 + }, + { + "epoch": 0.6733376371852808, + "grad_norm": 1.872200316530706, + "learning_rate": 0.00015644999999999998, + "loss": 2.7559, + "step": 1043 + }, + { + "epoch": 0.6739832149774048, + "grad_norm": 1.5502297350093401, + "learning_rate": 0.00015659999999999998, + "loss": 2.685, + "step": 1044 + }, + { + "epoch": 0.6746287927695287, + "grad_norm": 1.4289868614259678, + "learning_rate": 0.00015675, + "loss": 2.293, + "step": 1045 + }, + { + "epoch": 0.6752743705616526, + "grad_norm": 1.5167137780007365, + "learning_rate": 0.0001569, + "loss": 2.5719, + "step": 1046 + }, + { + "epoch": 0.6759199483537767, + "grad_norm": 1.567763268961761, + "learning_rate": 0.00015704999999999997, + "loss": 2.46, + "step": 1047 + }, + { + "epoch": 0.6765655261459006, + "grad_norm": 1.535424259832485, + "learning_rate": 0.0001572, + "loss": 2.6615, + "step": 1048 + }, + { + "epoch": 0.6772111039380245, + "grad_norm": 1.4692787108519483, + "learning_rate": 0.00015734999999999998, + "loss": 2.5447, + "step": 1049 + }, + { + "epoch": 0.6778566817301485, + "grad_norm": 1.4652536045061857, + "learning_rate": 0.00015749999999999998, + "loss": 2.6537, + "step": 1050 + }, + { + "epoch": 0.6785022595222724, + "grad_norm": 1.4654142772722833, + "learning_rate": 0.00015764999999999998, + "loss": 2.5076, + "step": 1051 + }, + { + "epoch": 0.6791478373143964, + "grad_norm": 1.4380076218718998, + "learning_rate": 0.0001578, + "loss": 2.5362, + "step": 1052 + }, + { + "epoch": 0.6797934151065204, + "grad_norm": 1.4521380128248913, + "learning_rate": 0.00015794999999999996, + "loss": 2.1076, + "step": 1053 + }, + { + "epoch": 0.6804389928986443, + "grad_norm": 1.3719848819709914, + "learning_rate": 0.0001581, + "loss": 2.6144, + "step": 1054 + }, + { + "epoch": 0.6810845706907682, + "grad_norm": 1.4344355896755077, + "learning_rate": 0.00015824999999999997, + "loss": 2.5844, + "step": 1055 + }, + { + "epoch": 0.6817301484828922, + "grad_norm": 1.3284849121627402, + "learning_rate": 0.0001584, + "loss": 2.35, + "step": 1056 + }, + { + "epoch": 0.6823757262750162, + "grad_norm": 1.428376358900013, + "learning_rate": 0.00015854999999999998, + "loss": 2.4179, + "step": 1057 + }, + { + "epoch": 0.6830213040671401, + "grad_norm": 1.610315112974196, + "learning_rate": 0.00015869999999999998, + "loss": 2.0006, + "step": 1058 + }, + { + "epoch": 0.683666881859264, + "grad_norm": 1.4880044032426682, + "learning_rate": 0.00015884999999999999, + "loss": 2.8033, + "step": 1059 + }, + { + "epoch": 0.684312459651388, + "grad_norm": 1.3995554438182303, + "learning_rate": 0.000159, + "loss": 2.5015, + "step": 1060 + }, + { + "epoch": 0.6849580374435119, + "grad_norm": 1.4648572788435892, + "learning_rate": 
0.00015914999999999997, + "loss": 2.3056, + "step": 1061 + }, + { + "epoch": 0.6856036152356358, + "grad_norm": 1.3869687807219597, + "learning_rate": 0.0001593, + "loss": 2.57, + "step": 1062 + }, + { + "epoch": 0.6862491930277599, + "grad_norm": 1.443869428443665, + "learning_rate": 0.00015944999999999997, + "loss": 2.5515, + "step": 1063 + }, + { + "epoch": 0.6868947708198838, + "grad_norm": 1.4223411781037323, + "learning_rate": 0.0001596, + "loss": 2.2494, + "step": 1064 + }, + { + "epoch": 0.6875403486120077, + "grad_norm": 1.4175344371218663, + "learning_rate": 0.00015974999999999998, + "loss": 2.5107, + "step": 1065 + }, + { + "epoch": 0.6881859264041317, + "grad_norm": 1.4810946641129858, + "learning_rate": 0.00015989999999999998, + "loss": 2.7639, + "step": 1066 + }, + { + "epoch": 0.6888315041962556, + "grad_norm": 1.2524751409304806, + "learning_rate": 0.00016005, + "loss": 2.3502, + "step": 1067 + }, + { + "epoch": 0.6894770819883796, + "grad_norm": 1.4826655933024013, + "learning_rate": 0.0001602, + "loss": 2.7992, + "step": 1068 + }, + { + "epoch": 0.6901226597805036, + "grad_norm": 1.4598774302370197, + "learning_rate": 0.00016034999999999997, + "loss": 2.4853, + "step": 1069 + }, + { + "epoch": 0.6907682375726275, + "grad_norm": 1.4400297589363236, + "learning_rate": 0.0001605, + "loss": 2.3296, + "step": 1070 + }, + { + "epoch": 0.6914138153647514, + "grad_norm": 1.379422709612617, + "learning_rate": 0.00016064999999999997, + "loss": 2.6198, + "step": 1071 + }, + { + "epoch": 0.6920593931568754, + "grad_norm": 1.4516936538520182, + "learning_rate": 0.0001608, + "loss": 2.7363, + "step": 1072 + }, + { + "epoch": 0.6927049709489993, + "grad_norm": 1.3971485967331903, + "learning_rate": 0.00016094999999999998, + "loss": 2.313, + "step": 1073 + }, + { + "epoch": 0.6933505487411233, + "grad_norm": 1.5880521871647817, + "learning_rate": 0.00016109999999999999, + "loss": 2.4674, + "step": 1074 + }, + { + "epoch": 0.6939961265332473, + "grad_norm": 1.3174946209678158, + "learning_rate": 0.00016125, + "loss": 2.3838, + "step": 1075 + }, + { + "epoch": 0.6946417043253712, + "grad_norm": 1.6845378029587355, + "learning_rate": 0.0001614, + "loss": 2.644, + "step": 1076 + }, + { + "epoch": 0.6952872821174951, + "grad_norm": 1.5643002687457652, + "learning_rate": 0.00016154999999999997, + "loss": 2.6391, + "step": 1077 + }, + { + "epoch": 0.6959328599096191, + "grad_norm": 1.5838211972508263, + "learning_rate": 0.0001617, + "loss": 2.8901, + "step": 1078 + }, + { + "epoch": 0.6965784377017431, + "grad_norm": 1.4592706076099577, + "learning_rate": 0.00016184999999999998, + "loss": 2.5322, + "step": 1079 + }, + { + "epoch": 0.697224015493867, + "grad_norm": 1.369568369339783, + "learning_rate": 0.000162, + "loss": 2.627, + "step": 1080 + }, + { + "epoch": 0.697869593285991, + "grad_norm": 1.5719248254943132, + "learning_rate": 0.00016214999999999998, + "loss": 2.5346, + "step": 1081 + }, + { + "epoch": 0.6985151710781149, + "grad_norm": 1.4593808805153448, + "learning_rate": 0.0001623, + "loss": 2.6586, + "step": 1082 + }, + { + "epoch": 0.6991607488702388, + "grad_norm": 1.4033656584915248, + "learning_rate": 0.00016245, + "loss": 2.6557, + "step": 1083 + }, + { + "epoch": 0.6998063266623629, + "grad_norm": 1.3400349876934594, + "learning_rate": 0.0001626, + "loss": 2.2737, + "step": 1084 + }, + { + "epoch": 0.7004519044544868, + "grad_norm": 1.3171334320333037, + "learning_rate": 0.00016274999999999997, + "loss": 2.1546, + "step": 1085 + }, + { + "epoch": 0.7010974822466107, + 
"grad_norm": 1.5444452950036192, + "learning_rate": 0.0001629, + "loss": 2.6677, + "step": 1086 + }, + { + "epoch": 0.7017430600387347, + "grad_norm": 1.4386921706391886, + "learning_rate": 0.00016304999999999998, + "loss": 2.6619, + "step": 1087 + }, + { + "epoch": 0.7023886378308586, + "grad_norm": 1.4904867270349063, + "learning_rate": 0.0001632, + "loss": 2.6745, + "step": 1088 + }, + { + "epoch": 0.7030342156229825, + "grad_norm": 1.2514073640613517, + "learning_rate": 0.00016334999999999999, + "loss": 2.516, + "step": 1089 + }, + { + "epoch": 0.7036797934151066, + "grad_norm": 1.4148829146937092, + "learning_rate": 0.0001635, + "loss": 2.7281, + "step": 1090 + }, + { + "epoch": 0.7043253712072305, + "grad_norm": 1.6288304405577045, + "learning_rate": 0.00016365, + "loss": 2.68, + "step": 1091 + }, + { + "epoch": 0.7049709489993544, + "grad_norm": 1.4161039678456693, + "learning_rate": 0.0001638, + "loss": 2.5087, + "step": 1092 + }, + { + "epoch": 0.7056165267914783, + "grad_norm": 1.328697209183801, + "learning_rate": 0.00016394999999999997, + "loss": 2.541, + "step": 1093 + }, + { + "epoch": 0.7062621045836023, + "grad_norm": 1.5552542838816161, + "learning_rate": 0.0001641, + "loss": 2.641, + "step": 1094 + }, + { + "epoch": 0.7069076823757263, + "grad_norm": 1.4729873677634555, + "learning_rate": 0.00016424999999999998, + "loss": 2.2076, + "step": 1095 + }, + { + "epoch": 0.7075532601678503, + "grad_norm": 1.6452798033701428, + "learning_rate": 0.0001644, + "loss": 2.583, + "step": 1096 + }, + { + "epoch": 0.7081988379599742, + "grad_norm": 1.4551197296702945, + "learning_rate": 0.00016455, + "loss": 2.6376, + "step": 1097 + }, + { + "epoch": 0.7088444157520981, + "grad_norm": 1.4302879327398355, + "learning_rate": 0.0001647, + "loss": 2.4443, + "step": 1098 + }, + { + "epoch": 0.709489993544222, + "grad_norm": 1.3512255869780196, + "learning_rate": 0.00016485, + "loss": 2.5316, + "step": 1099 + }, + { + "epoch": 0.7101355713363461, + "grad_norm": 1.4879045359277447, + "learning_rate": 0.000165, + "loss": 2.2356, + "step": 1100 + }, + { + "epoch": 0.71078114912847, + "grad_norm": 1.7858862740191996, + "learning_rate": 0.00016514999999999998, + "loss": 2.6731, + "step": 1101 + }, + { + "epoch": 0.7114267269205939, + "grad_norm": 1.53062617931313, + "learning_rate": 0.0001653, + "loss": 2.6715, + "step": 1102 + }, + { + "epoch": 0.7120723047127179, + "grad_norm": 1.3177944205386078, + "learning_rate": 0.00016544999999999998, + "loss": 2.6595, + "step": 1103 + }, + { + "epoch": 0.7127178825048418, + "grad_norm": 1.6364067677499177, + "learning_rate": 0.0001656, + "loss": 2.629, + "step": 1104 + }, + { + "epoch": 0.7133634602969657, + "grad_norm": 1.6224612983736533, + "learning_rate": 0.00016575, + "loss": 2.7178, + "step": 1105 + }, + { + "epoch": 0.7140090380890898, + "grad_norm": 1.4379578596210916, + "learning_rate": 0.0001659, + "loss": 2.653, + "step": 1106 + }, + { + "epoch": 0.7146546158812137, + "grad_norm": 1.3426712339896678, + "learning_rate": 0.00016604999999999997, + "loss": 2.6961, + "step": 1107 + }, + { + "epoch": 0.7153001936733376, + "grad_norm": 1.2678173641351005, + "learning_rate": 0.0001662, + "loss": 2.3395, + "step": 1108 + }, + { + "epoch": 0.7159457714654616, + "grad_norm": 1.4660230480576704, + "learning_rate": 0.00016634999999999998, + "loss": 2.7893, + "step": 1109 + }, + { + "epoch": 0.7165913492575855, + "grad_norm": 1.4304918671360662, + "learning_rate": 0.0001665, + "loss": 2.153, + "step": 1110 + }, + { + "epoch": 0.7172369270497095, + "grad_norm": 
1.3817103373949582, + "learning_rate": 0.00016664999999999998, + "loss": 2.4318, + "step": 1111 + }, + { + "epoch": 0.7178825048418335, + "grad_norm": 1.3279968736251917, + "learning_rate": 0.0001668, + "loss": 2.4623, + "step": 1112 + }, + { + "epoch": 0.7185280826339574, + "grad_norm": 1.3079037032870091, + "learning_rate": 0.00016695, + "loss": 2.1327, + "step": 1113 + }, + { + "epoch": 0.7191736604260813, + "grad_norm": 1.3290985202890675, + "learning_rate": 0.0001671, + "loss": 2.4986, + "step": 1114 + }, + { + "epoch": 0.7198192382182053, + "grad_norm": 1.340862356724497, + "learning_rate": 0.00016724999999999997, + "loss": 2.1656, + "step": 1115 + }, + { + "epoch": 0.7204648160103292, + "grad_norm": 1.485594610919615, + "learning_rate": 0.0001674, + "loss": 2.5952, + "step": 1116 + }, + { + "epoch": 0.7211103938024532, + "grad_norm": 1.5922990344657553, + "learning_rate": 0.00016754999999999998, + "loss": 2.7611, + "step": 1117 + }, + { + "epoch": 0.7217559715945772, + "grad_norm": 1.6089890175047965, + "learning_rate": 0.0001677, + "loss": 2.3836, + "step": 1118 + }, + { + "epoch": 0.7224015493867011, + "grad_norm": 1.631448000795453, + "learning_rate": 0.00016785, + "loss": 2.5773, + "step": 1119 + }, + { + "epoch": 0.723047127178825, + "grad_norm": 1.3619566739010114, + "learning_rate": 0.000168, + "loss": 2.7926, + "step": 1120 + }, + { + "epoch": 0.723692704970949, + "grad_norm": 1.363436022094993, + "learning_rate": 0.00016815, + "loss": 2.4981, + "step": 1121 + }, + { + "epoch": 0.724338282763073, + "grad_norm": 1.334098504845703, + "learning_rate": 0.0001683, + "loss": 2.5968, + "step": 1122 + }, + { + "epoch": 0.7249838605551969, + "grad_norm": 1.409522124408985, + "learning_rate": 0.00016844999999999997, + "loss": 2.7232, + "step": 1123 + }, + { + "epoch": 0.7256294383473209, + "grad_norm": 1.3684384795756268, + "learning_rate": 0.0001686, + "loss": 2.7188, + "step": 1124 + }, + { + "epoch": 0.7262750161394448, + "grad_norm": 1.3959154928738684, + "learning_rate": 0.00016874999999999998, + "loss": 2.5945, + "step": 1125 + }, + { + "epoch": 0.7269205939315687, + "grad_norm": 1.4845321892268104, + "learning_rate": 0.00016889999999999996, + "loss": 2.7998, + "step": 1126 + }, + { + "epoch": 0.7275661717236928, + "grad_norm": 1.3257513613432532, + "learning_rate": 0.00016905, + "loss": 2.6538, + "step": 1127 + }, + { + "epoch": 0.7282117495158167, + "grad_norm": 1.3786334890039573, + "learning_rate": 0.00016919999999999997, + "loss": 2.5987, + "step": 1128 + }, + { + "epoch": 0.7288573273079406, + "grad_norm": 1.3393703748085972, + "learning_rate": 0.00016935, + "loss": 2.6476, + "step": 1129 + }, + { + "epoch": 0.7295029051000645, + "grad_norm": 1.3931332943250239, + "learning_rate": 0.00016949999999999997, + "loss": 2.5753, + "step": 1130 + }, + { + "epoch": 0.7301484828921885, + "grad_norm": 1.3679340872781542, + "learning_rate": 0.00016964999999999998, + "loss": 2.6974, + "step": 1131 + }, + { + "epoch": 0.7307940606843124, + "grad_norm": 1.3745191754691481, + "learning_rate": 0.00016979999999999998, + "loss": 2.4647, + "step": 1132 + }, + { + "epoch": 0.7314396384764364, + "grad_norm": 1.4106263363193425, + "learning_rate": 0.00016994999999999998, + "loss": 2.4418, + "step": 1133 + }, + { + "epoch": 0.7320852162685604, + "grad_norm": 1.388716158289424, + "learning_rate": 0.00017009999999999996, + "loss": 2.4192, + "step": 1134 + }, + { + "epoch": 0.7327307940606843, + "grad_norm": 1.4432370254539102, + "learning_rate": 0.00017025, + "loss": 2.5198, + "step": 1135 + }, + { 
+ "epoch": 0.7333763718528082, + "grad_norm": 1.5044856190143119, + "learning_rate": 0.00017039999999999997, + "loss": 2.3156, + "step": 1136 + }, + { + "epoch": 0.7340219496449322, + "grad_norm": 1.3817310654285093, + "learning_rate": 0.00017055, + "loss": 2.2671, + "step": 1137 + }, + { + "epoch": 0.7346675274370562, + "grad_norm": 1.287455934930454, + "learning_rate": 0.00017069999999999998, + "loss": 2.3881, + "step": 1138 + }, + { + "epoch": 0.7353131052291801, + "grad_norm": 1.4131221822244855, + "learning_rate": 0.00017084999999999998, + "loss": 2.6223, + "step": 1139 + }, + { + "epoch": 0.7359586830213041, + "grad_norm": 1.3050662207663688, + "learning_rate": 0.00017099999999999998, + "loss": 2.3789, + "step": 1140 + }, + { + "epoch": 0.736604260813428, + "grad_norm": 1.4182339603518659, + "learning_rate": 0.00017114999999999999, + "loss": 2.2474, + "step": 1141 + }, + { + "epoch": 0.7372498386055519, + "grad_norm": 1.3360388940815333, + "learning_rate": 0.00017129999999999996, + "loss": 2.6172, + "step": 1142 + }, + { + "epoch": 0.737895416397676, + "grad_norm": 1.3409594842458537, + "learning_rate": 0.00017145, + "loss": 2.0387, + "step": 1143 + }, + { + "epoch": 0.7385409941897999, + "grad_norm": 1.5075132760300058, + "learning_rate": 0.00017159999999999997, + "loss": 2.7153, + "step": 1144 + }, + { + "epoch": 0.7391865719819238, + "grad_norm": 1.3704582830247793, + "learning_rate": 0.00017175, + "loss": 2.5334, + "step": 1145 + }, + { + "epoch": 0.7398321497740478, + "grad_norm": 1.4239124827577931, + "learning_rate": 0.00017189999999999998, + "loss": 2.6389, + "step": 1146 + }, + { + "epoch": 0.7404777275661717, + "grad_norm": 1.322907297779158, + "learning_rate": 0.00017204999999999998, + "loss": 2.7222, + "step": 1147 + }, + { + "epoch": 0.7411233053582956, + "grad_norm": 1.3661321133752846, + "learning_rate": 0.00017219999999999998, + "loss": 2.4682, + "step": 1148 + }, + { + "epoch": 0.7417688831504197, + "grad_norm": 1.3851363749242624, + "learning_rate": 0.00017235, + "loss": 2.8139, + "step": 1149 + }, + { + "epoch": 0.7424144609425436, + "grad_norm": 1.342984918816199, + "learning_rate": 0.00017249999999999996, + "loss": 2.5529, + "step": 1150 + }, + { + "epoch": 0.7430600387346675, + "grad_norm": 1.358470305090547, + "learning_rate": 0.00017265, + "loss": 2.4396, + "step": 1151 + }, + { + "epoch": 0.7437056165267915, + "grad_norm": 1.4962135290857554, + "learning_rate": 0.00017279999999999997, + "loss": 2.9503, + "step": 1152 + }, + { + "epoch": 0.7443511943189154, + "grad_norm": 1.5046205485960094, + "learning_rate": 0.00017294999999999998, + "loss": 2.6277, + "step": 1153 + }, + { + "epoch": 0.7449967721110394, + "grad_norm": 1.4541556287057509, + "learning_rate": 0.00017309999999999998, + "loss": 2.795, + "step": 1154 + }, + { + "epoch": 0.7456423499031634, + "grad_norm": 1.251942913426966, + "learning_rate": 0.00017324999999999998, + "loss": 2.5747, + "step": 1155 + }, + { + "epoch": 0.7462879276952873, + "grad_norm": 1.3828498723671183, + "learning_rate": 0.00017339999999999996, + "loss": 2.6597, + "step": 1156 + }, + { + "epoch": 0.7469335054874112, + "grad_norm": 1.4373615657402126, + "learning_rate": 0.00017355, + "loss": 2.4297, + "step": 1157 + }, + { + "epoch": 0.7475790832795352, + "grad_norm": 1.3404307460142224, + "learning_rate": 0.00017369999999999997, + "loss": 2.8141, + "step": 1158 + }, + { + "epoch": 0.7482246610716591, + "grad_norm": 1.2807166609617404, + "learning_rate": 0.00017385, + "loss": 2.5453, + "step": 1159 + }, + { + "epoch": 
0.7488702388637831, + "grad_norm": 1.3138039487352045, + "learning_rate": 0.00017399999999999997, + "loss": 2.3508, + "step": 1160 + }, + { + "epoch": 0.749515816655907, + "grad_norm": 1.393266991508206, + "learning_rate": 0.00017414999999999998, + "loss": 2.6676, + "step": 1161 + }, + { + "epoch": 0.750161394448031, + "grad_norm": 1.23521757145768, + "learning_rate": 0.00017429999999999998, + "loss": 2.0398, + "step": 1162 + }, + { + "epoch": 0.7508069722401549, + "grad_norm": 1.343085930446913, + "learning_rate": 0.00017444999999999998, + "loss": 2.5796, + "step": 1163 + }, + { + "epoch": 0.7514525500322788, + "grad_norm": 1.5252006424461162, + "learning_rate": 0.00017459999999999996, + "loss": 2.6259, + "step": 1164 + }, + { + "epoch": 0.7520981278244029, + "grad_norm": 1.7338599561845878, + "learning_rate": 0.00017475, + "loss": 2.8471, + "step": 1165 + }, + { + "epoch": 0.7527437056165268, + "grad_norm": 1.4331076530440978, + "learning_rate": 0.00017489999999999997, + "loss": 2.6182, + "step": 1166 + }, + { + "epoch": 0.7533892834086507, + "grad_norm": 1.480821245921537, + "learning_rate": 0.00017505, + "loss": 2.598, + "step": 1167 + }, + { + "epoch": 0.7540348612007747, + "grad_norm": 1.4631354880317708, + "learning_rate": 0.00017519999999999998, + "loss": 2.665, + "step": 1168 + }, + { + "epoch": 0.7546804389928986, + "grad_norm": 1.2993943570736073, + "learning_rate": 0.00017534999999999998, + "loss": 2.4722, + "step": 1169 + }, + { + "epoch": 0.7553260167850226, + "grad_norm": 1.5734888684484443, + "learning_rate": 0.00017549999999999998, + "loss": 2.7309, + "step": 1170 + }, + { + "epoch": 0.7559715945771466, + "grad_norm": 1.697376397829374, + "learning_rate": 0.00017565, + "loss": 2.1387, + "step": 1171 + }, + { + "epoch": 0.7566171723692705, + "grad_norm": 1.4834020559286143, + "learning_rate": 0.00017579999999999996, + "loss": 2.553, + "step": 1172 + }, + { + "epoch": 0.7572627501613944, + "grad_norm": 1.4267014343591518, + "learning_rate": 0.00017595, + "loss": 2.4941, + "step": 1173 + }, + { + "epoch": 0.7579083279535184, + "grad_norm": 1.6136586477038333, + "learning_rate": 0.00017609999999999997, + "loss": 2.6975, + "step": 1174 + }, + { + "epoch": 0.7585539057456423, + "grad_norm": 1.6721537048099553, + "learning_rate": 0.00017625, + "loss": 2.9697, + "step": 1175 + }, + { + "epoch": 0.7591994835377663, + "grad_norm": 1.4771753954407478, + "learning_rate": 0.00017639999999999998, + "loss": 2.5201, + "step": 1176 + }, + { + "epoch": 0.7598450613298903, + "grad_norm": 1.3189805693300418, + "learning_rate": 0.00017654999999999998, + "loss": 2.4519, + "step": 1177 + }, + { + "epoch": 0.7604906391220142, + "grad_norm": 1.4930904477109328, + "learning_rate": 0.00017669999999999999, + "loss": 2.6268, + "step": 1178 + }, + { + "epoch": 0.7611362169141381, + "grad_norm": 1.432719079963965, + "learning_rate": 0.00017685, + "loss": 2.6702, + "step": 1179 + }, + { + "epoch": 0.7617817947062621, + "grad_norm": 1.31053835826542, + "learning_rate": 0.00017699999999999997, + "loss": 2.5343, + "step": 1180 + }, + { + "epoch": 0.7624273724983861, + "grad_norm": 1.2585402193102306, + "learning_rate": 0.00017715, + "loss": 2.6463, + "step": 1181 + }, + { + "epoch": 0.76307295029051, + "grad_norm": 1.471708449785166, + "learning_rate": 0.00017729999999999997, + "loss": 2.5522, + "step": 1182 + }, + { + "epoch": 0.763718528082634, + "grad_norm": 1.4174597553840482, + "learning_rate": 0.00017745, + "loss": 2.5232, + "step": 1183 + }, + { + "epoch": 0.7643641058747579, + "grad_norm": 
1.37484744381759, + "learning_rate": 0.00017759999999999998, + "loss": 2.2684, + "step": 1184 + }, + { + "epoch": 0.7650096836668818, + "grad_norm": 1.6468289634702196, + "learning_rate": 0.00017774999999999998, + "loss": 2.7579, + "step": 1185 + }, + { + "epoch": 0.7656552614590059, + "grad_norm": 1.4365483355785653, + "learning_rate": 0.0001779, + "loss": 2.727, + "step": 1186 + }, + { + "epoch": 0.7663008392511298, + "grad_norm": 1.3335944519055356, + "learning_rate": 0.00017805, + "loss": 2.5309, + "step": 1187 + }, + { + "epoch": 0.7669464170432537, + "grad_norm": 1.3306241794388936, + "learning_rate": 0.00017819999999999997, + "loss": 2.6836, + "step": 1188 + }, + { + "epoch": 0.7675919948353777, + "grad_norm": 1.3271592341912852, + "learning_rate": 0.00017835, + "loss": 2.7908, + "step": 1189 + }, + { + "epoch": 0.7682375726275016, + "grad_norm": 1.2745075430616526, + "learning_rate": 0.00017849999999999997, + "loss": 2.5232, + "step": 1190 + }, + { + "epoch": 0.7688831504196255, + "grad_norm": 1.369435118647796, + "learning_rate": 0.00017865, + "loss": 2.7765, + "step": 1191 + }, + { + "epoch": 0.7695287282117496, + "grad_norm": 1.526249962796695, + "learning_rate": 0.00017879999999999998, + "loss": 2.6469, + "step": 1192 + }, + { + "epoch": 0.7701743060038735, + "grad_norm": 1.3746512436642726, + "learning_rate": 0.00017894999999999999, + "loss": 2.7236, + "step": 1193 + }, + { + "epoch": 0.7708198837959974, + "grad_norm": 1.4237996622254387, + "learning_rate": 0.0001791, + "loss": 2.6908, + "step": 1194 + }, + { + "epoch": 0.7714654615881213, + "grad_norm": 1.276320949054831, + "learning_rate": 0.00017925, + "loss": 2.7002, + "step": 1195 + }, + { + "epoch": 0.7721110393802453, + "grad_norm": 1.3110452915182214, + "learning_rate": 0.00017939999999999997, + "loss": 2.6931, + "step": 1196 + }, + { + "epoch": 0.7727566171723693, + "grad_norm": 1.3122596993331694, + "learning_rate": 0.00017955, + "loss": 2.5397, + "step": 1197 + }, + { + "epoch": 0.7734021949644933, + "grad_norm": 1.3821423011338414, + "learning_rate": 0.00017969999999999998, + "loss": 2.4895, + "step": 1198 + }, + { + "epoch": 0.7740477727566172, + "grad_norm": 1.296564496973494, + "learning_rate": 0.00017984999999999998, + "loss": 2.4649, + "step": 1199 + }, + { + "epoch": 0.7746933505487411, + "grad_norm": 1.3802701371345674, + "learning_rate": 0.00017999999999999998, + "loss": 2.5744, + "step": 1200 + }, + { + "epoch": 0.775338928340865, + "grad_norm": 1.2468191017310442, + "learning_rate": 0.00018015, + "loss": 2.5521, + "step": 1201 + }, + { + "epoch": 0.775984506132989, + "grad_norm": 1.8557315918760733, + "learning_rate": 0.00018029999999999996, + "loss": 2.4629, + "step": 1202 + }, + { + "epoch": 0.776630083925113, + "grad_norm": 1.2993356281044317, + "learning_rate": 0.00018045, + "loss": 2.7784, + "step": 1203 + }, + { + "epoch": 0.7772756617172369, + "grad_norm": 1.5024192302153943, + "learning_rate": 0.00018059999999999997, + "loss": 2.6535, + "step": 1204 + }, + { + "epoch": 0.7779212395093609, + "grad_norm": 1.4064760294350922, + "learning_rate": 0.00018075, + "loss": 2.48, + "step": 1205 + }, + { + "epoch": 0.7785668173014848, + "grad_norm": 1.503798593688923, + "learning_rate": 0.00018089999999999998, + "loss": 2.54, + "step": 1206 + }, + { + "epoch": 0.7792123950936087, + "grad_norm": 1.304486772006149, + "learning_rate": 0.00018104999999999998, + "loss": 2.3729, + "step": 1207 + }, + { + "epoch": 0.7798579728857328, + "grad_norm": 1.4860795604111903, + "learning_rate": 0.00018119999999999999, + 
"loss": 2.6269, + "step": 1208 + }, + { + "epoch": 0.7805035506778567, + "grad_norm": 1.663553180164372, + "learning_rate": 0.00018135, + "loss": 2.9194, + "step": 1209 + }, + { + "epoch": 0.7811491284699806, + "grad_norm": 1.3397828461000072, + "learning_rate": 0.00018149999999999997, + "loss": 2.1346, + "step": 1210 + }, + { + "epoch": 0.7817947062621046, + "grad_norm": 1.379471346485944, + "learning_rate": 0.00018165, + "loss": 2.7517, + "step": 1211 + }, + { + "epoch": 0.7824402840542285, + "grad_norm": 1.3926250239675824, + "learning_rate": 0.00018179999999999997, + "loss": 2.5549, + "step": 1212 + }, + { + "epoch": 0.7830858618463525, + "grad_norm": 1.5713963491566443, + "learning_rate": 0.00018195, + "loss": 2.5366, + "step": 1213 + }, + { + "epoch": 0.7837314396384765, + "grad_norm": 1.7984298077865555, + "learning_rate": 0.00018209999999999998, + "loss": 2.1753, + "step": 1214 + }, + { + "epoch": 0.7843770174306004, + "grad_norm": 1.4137839629006133, + "learning_rate": 0.00018224999999999998, + "loss": 2.6289, + "step": 1215 + }, + { + "epoch": 0.7850225952227243, + "grad_norm": 1.4569917666955152, + "learning_rate": 0.0001824, + "loss": 2.5626, + "step": 1216 + }, + { + "epoch": 0.7856681730148483, + "grad_norm": 1.5306975416116204, + "learning_rate": 0.00018255, + "loss": 2.5694, + "step": 1217 + }, + { + "epoch": 0.7863137508069722, + "grad_norm": 1.4609137353626709, + "learning_rate": 0.00018269999999999997, + "loss": 2.4947, + "step": 1218 + }, + { + "epoch": 0.7869593285990962, + "grad_norm": 1.4782510597963472, + "learning_rate": 0.00018285, + "loss": 2.5614, + "step": 1219 + }, + { + "epoch": 0.7876049063912202, + "grad_norm": 1.4239103783962643, + "learning_rate": 0.00018299999999999998, + "loss": 2.6359, + "step": 1220 + }, + { + "epoch": 0.7882504841833441, + "grad_norm": 1.4800630941796256, + "learning_rate": 0.00018315, + "loss": 2.6024, + "step": 1221 + }, + { + "epoch": 0.788896061975468, + "grad_norm": 1.4328796066018488, + "learning_rate": 0.00018329999999999998, + "loss": 2.6626, + "step": 1222 + }, + { + "epoch": 0.789541639767592, + "grad_norm": 1.3975487629073033, + "learning_rate": 0.00018345, + "loss": 2.6338, + "step": 1223 + }, + { + "epoch": 0.790187217559716, + "grad_norm": 1.4385208066132507, + "learning_rate": 0.0001836, + "loss": 2.6235, + "step": 1224 + }, + { + "epoch": 0.7908327953518399, + "grad_norm": 1.3963426451019152, + "learning_rate": 0.00018375, + "loss": 2.5129, + "step": 1225 + }, + { + "epoch": 0.7914783731439639, + "grad_norm": 1.2775988165955308, + "learning_rate": 0.00018389999999999997, + "loss": 2.5461, + "step": 1226 + }, + { + "epoch": 0.7921239509360878, + "grad_norm": 1.4508801457305456, + "learning_rate": 0.00018405, + "loss": 2.4847, + "step": 1227 + }, + { + "epoch": 0.7927695287282117, + "grad_norm": 1.4180929913021445, + "learning_rate": 0.00018419999999999998, + "loss": 2.7251, + "step": 1228 + }, + { + "epoch": 0.7934151065203358, + "grad_norm": 1.6814019599337393, + "learning_rate": 0.00018435, + "loss": 2.8335, + "step": 1229 + }, + { + "epoch": 0.7940606843124597, + "grad_norm": 1.436715938448575, + "learning_rate": 0.00018449999999999999, + "loss": 2.8169, + "step": 1230 + }, + { + "epoch": 0.7947062621045836, + "grad_norm": 1.505575418330224, + "learning_rate": 0.00018465, + "loss": 2.6966, + "step": 1231 + }, + { + "epoch": 0.7953518398967075, + "grad_norm": 1.491581588418393, + "learning_rate": 0.0001848, + "loss": 2.6492, + "step": 1232 + }, + { + "epoch": 0.7959974176888315, + "grad_norm": 1.3786450469453366, + 
"learning_rate": 0.00018495, + "loss": 2.7402, + "step": 1233 + }, + { + "epoch": 0.7966429954809554, + "grad_norm": 1.556314852034487, + "learning_rate": 0.00018509999999999997, + "loss": 2.6877, + "step": 1234 + }, + { + "epoch": 0.7972885732730794, + "grad_norm": 1.7736877400112154, + "learning_rate": 0.00018525, + "loss": 2.2372, + "step": 1235 + }, + { + "epoch": 0.7979341510652034, + "grad_norm": 1.2449760404687378, + "learning_rate": 0.00018539999999999998, + "loss": 2.3993, + "step": 1236 + }, + { + "epoch": 0.7985797288573273, + "grad_norm": 1.409469899073388, + "learning_rate": 0.00018555, + "loss": 2.7232, + "step": 1237 + }, + { + "epoch": 0.7992253066494512, + "grad_norm": 1.421267645745174, + "learning_rate": 0.0001857, + "loss": 2.5831, + "step": 1238 + }, + { + "epoch": 0.7998708844415752, + "grad_norm": 1.2861365908214664, + "learning_rate": 0.00018585, + "loss": 2.4773, + "step": 1239 + }, + { + "epoch": 0.8005164622336992, + "grad_norm": 1.115113319713233, + "learning_rate": 0.000186, + "loss": 2.4353, + "step": 1240 + }, + { + "epoch": 0.8011620400258231, + "grad_norm": 1.2607275790242092, + "learning_rate": 0.00018615, + "loss": 2.5185, + "step": 1241 + }, + { + "epoch": 0.8018076178179471, + "grad_norm": 1.32582950262207, + "learning_rate": 0.00018629999999999997, + "loss": 2.4517, + "step": 1242 + }, + { + "epoch": 0.802453195610071, + "grad_norm": 1.2718032456444195, + "learning_rate": 0.00018645, + "loss": 2.6362, + "step": 1243 + }, + { + "epoch": 0.8030987734021949, + "grad_norm": 1.6364673166153576, + "learning_rate": 0.00018659999999999998, + "loss": 2.7671, + "step": 1244 + }, + { + "epoch": 0.8037443511943189, + "grad_norm": 1.3096242018448678, + "learning_rate": 0.00018675, + "loss": 2.4791, + "step": 1245 + }, + { + "epoch": 0.8043899289864429, + "grad_norm": 1.3640167361565998, + "learning_rate": 0.0001869, + "loss": 2.5467, + "step": 1246 + }, + { + "epoch": 0.8050355067785668, + "grad_norm": 1.445547502889608, + "learning_rate": 0.00018705, + "loss": 2.5863, + "step": 1247 + }, + { + "epoch": 0.8056810845706908, + "grad_norm": 1.2469016317782837, + "learning_rate": 0.0001872, + "loss": 2.3303, + "step": 1248 + }, + { + "epoch": 0.8063266623628147, + "grad_norm": 1.3384708271739427, + "learning_rate": 0.00018735, + "loss": 2.5738, + "step": 1249 + }, + { + "epoch": 0.8069722401549386, + "grad_norm": 1.4104071867463621, + "learning_rate": 0.00018749999999999998, + "loss": 2.5098, + "step": 1250 + }, + { + "epoch": 0.8076178179470627, + "grad_norm": 1.271132449479795, + "learning_rate": 0.00018764999999999998, + "loss": 2.1538, + "step": 1251 + }, + { + "epoch": 0.8082633957391866, + "grad_norm": 1.3446919885815705, + "learning_rate": 0.00018779999999999998, + "loss": 2.6199, + "step": 1252 + }, + { + "epoch": 0.8089089735313105, + "grad_norm": 1.380108764695665, + "learning_rate": 0.00018794999999999996, + "loss": 2.5542, + "step": 1253 + }, + { + "epoch": 0.8095545513234345, + "grad_norm": 1.4518671759246835, + "learning_rate": 0.0001881, + "loss": 2.7039, + "step": 1254 + }, + { + "epoch": 0.8102001291155584, + "grad_norm": 1.4174456965587703, + "learning_rate": 0.00018824999999999997, + "loss": 2.846, + "step": 1255 + }, + { + "epoch": 0.8108457069076824, + "grad_norm": 1.378293105502784, + "learning_rate": 0.00018839999999999997, + "loss": 2.8399, + "step": 1256 + }, + { + "epoch": 0.8114912846998064, + "grad_norm": 1.2948590831983033, + "learning_rate": 0.00018854999999999998, + "loss": 2.6387, + "step": 1257 + }, + { + "epoch": 0.8121368624919303, + 
"grad_norm": 1.4027212131542444, + "learning_rate": 0.00018869999999999998, + "loss": 2.5049, + "step": 1258 + }, + { + "epoch": 0.8127824402840542, + "grad_norm": 1.3105929467958704, + "learning_rate": 0.00018884999999999996, + "loss": 2.538, + "step": 1259 + }, + { + "epoch": 0.8134280180761781, + "grad_norm": 1.388889860742638, + "learning_rate": 0.00018899999999999999, + "loss": 2.5327, + "step": 1260 + }, + { + "epoch": 0.8140735958683021, + "grad_norm": 1.6139298322159092, + "learning_rate": 0.00018914999999999996, + "loss": 2.6691, + "step": 1261 + }, + { + "epoch": 0.8147191736604261, + "grad_norm": 1.344603837098755, + "learning_rate": 0.0001893, + "loss": 2.3926, + "step": 1262 + }, + { + "epoch": 0.81536475145255, + "grad_norm": 1.4095559971960634, + "learning_rate": 0.00018944999999999997, + "loss": 2.9021, + "step": 1263 + }, + { + "epoch": 0.816010329244674, + "grad_norm": 1.5469040945009873, + "learning_rate": 0.00018959999999999997, + "loss": 2.8585, + "step": 1264 + }, + { + "epoch": 0.8166559070367979, + "grad_norm": 1.3928589028497114, + "learning_rate": 0.00018974999999999998, + "loss": 2.1121, + "step": 1265 + }, + { + "epoch": 0.8173014848289218, + "grad_norm": 1.3763492834193138, + "learning_rate": 0.00018989999999999998, + "loss": 2.6714, + "step": 1266 + }, + { + "epoch": 0.8179470626210459, + "grad_norm": 1.2170597573156205, + "learning_rate": 0.00019004999999999996, + "loss": 2.4506, + "step": 1267 + }, + { + "epoch": 0.8185926404131698, + "grad_norm": 1.4321353323940555, + "learning_rate": 0.0001902, + "loss": 2.6214, + "step": 1268 + }, + { + "epoch": 0.8192382182052937, + "grad_norm": 1.4720550199381945, + "learning_rate": 0.00019034999999999996, + "loss": 2.324, + "step": 1269 + }, + { + "epoch": 0.8198837959974177, + "grad_norm": 1.4741140009567362, + "learning_rate": 0.0001905, + "loss": 2.5419, + "step": 1270 + }, + { + "epoch": 0.8205293737895416, + "grad_norm": 1.388328343654612, + "learning_rate": 0.00019064999999999997, + "loss": 2.2622, + "step": 1271 + }, + { + "epoch": 0.8211749515816655, + "grad_norm": 1.6604358405039021, + "learning_rate": 0.00019079999999999998, + "loss": 2.4525, + "step": 1272 + }, + { + "epoch": 0.8218205293737896, + "grad_norm": 1.3954473638037117, + "learning_rate": 0.00019094999999999998, + "loss": 2.4569, + "step": 1273 + }, + { + "epoch": 0.8224661071659135, + "grad_norm": 1.2860537154932785, + "learning_rate": 0.00019109999999999998, + "loss": 2.0083, + "step": 1274 + }, + { + "epoch": 0.8231116849580374, + "grad_norm": 1.3300002137101459, + "learning_rate": 0.00019124999999999996, + "loss": 2.4408, + "step": 1275 + }, + { + "epoch": 0.8237572627501614, + "grad_norm": 1.4581210123307036, + "learning_rate": 0.0001914, + "loss": 2.5312, + "step": 1276 + }, + { + "epoch": 0.8244028405422853, + "grad_norm": 1.419866680528342, + "learning_rate": 0.00019154999999999997, + "loss": 2.4862, + "step": 1277 + }, + { + "epoch": 0.8250484183344093, + "grad_norm": 1.5202585395525812, + "learning_rate": 0.0001917, + "loss": 2.5537, + "step": 1278 + }, + { + "epoch": 0.8256939961265333, + "grad_norm": 1.2031129243527559, + "learning_rate": 0.00019184999999999997, + "loss": 2.2859, + "step": 1279 + }, + { + "epoch": 0.8263395739186572, + "grad_norm": 1.2792975261861574, + "learning_rate": 0.00019199999999999998, + "loss": 2.4474, + "step": 1280 + }, + { + "epoch": 0.8269851517107811, + "grad_norm": 1.2968099200884036, + "learning_rate": 0.00019214999999999998, + "loss": 2.4168, + "step": 1281 + }, + { + "epoch": 0.8276307295029051, + 
"grad_norm": 1.4556646998727627, + "learning_rate": 0.00019229999999999999, + "loss": 2.4242, + "step": 1282 + }, + { + "epoch": 0.8282763072950291, + "grad_norm": 1.2865019111978058, + "learning_rate": 0.00019244999999999996, + "loss": 2.4677, + "step": 1283 + }, + { + "epoch": 0.828921885087153, + "grad_norm": 1.372327312019297, + "learning_rate": 0.0001926, + "loss": 2.7849, + "step": 1284 + }, + { + "epoch": 0.829567462879277, + "grad_norm": 1.3678366010689467, + "learning_rate": 0.00019274999999999997, + "loss": 2.4853, + "step": 1285 + }, + { + "epoch": 0.8302130406714009, + "grad_norm": 1.3135890044007823, + "learning_rate": 0.0001929, + "loss": 2.7572, + "step": 1286 + }, + { + "epoch": 0.8308586184635248, + "grad_norm": 1.2916292598189634, + "learning_rate": 0.00019304999999999998, + "loss": 2.4231, + "step": 1287 + }, + { + "epoch": 0.8315041962556488, + "grad_norm": 1.2983298093019304, + "learning_rate": 0.00019319999999999998, + "loss": 2.5774, + "step": 1288 + }, + { + "epoch": 0.8321497740477728, + "grad_norm": 1.3189123025688825, + "learning_rate": 0.00019334999999999998, + "loss": 2.2492, + "step": 1289 + }, + { + "epoch": 0.8327953518398967, + "grad_norm": 1.4192340464790176, + "learning_rate": 0.0001935, + "loss": 2.6477, + "step": 1290 + }, + { + "epoch": 0.8334409296320207, + "grad_norm": 1.2857078369939867, + "learning_rate": 0.00019364999999999996, + "loss": 2.4351, + "step": 1291 + }, + { + "epoch": 0.8340865074241446, + "grad_norm": 1.3843034074595058, + "learning_rate": 0.0001938, + "loss": 2.3637, + "step": 1292 + }, + { + "epoch": 0.8347320852162685, + "grad_norm": 1.4364828348757221, + "learning_rate": 0.00019394999999999997, + "loss": 2.4458, + "step": 1293 + }, + { + "epoch": 0.8353776630083926, + "grad_norm": 1.4761358191382155, + "learning_rate": 0.0001941, + "loss": 2.5493, + "step": 1294 + }, + { + "epoch": 0.8360232408005165, + "grad_norm": 1.3854394811220847, + "learning_rate": 0.00019424999999999998, + "loss": 2.3187, + "step": 1295 + }, + { + "epoch": 0.8366688185926404, + "grad_norm": 1.2591381147944893, + "learning_rate": 0.00019439999999999998, + "loss": 2.3872, + "step": 1296 + }, + { + "epoch": 0.8373143963847643, + "grad_norm": 1.2978417112415155, + "learning_rate": 0.00019454999999999999, + "loss": 2.0964, + "step": 1297 + }, + { + "epoch": 0.8379599741768883, + "grad_norm": 1.2583403299720013, + "learning_rate": 0.0001947, + "loss": 2.4698, + "step": 1298 + }, + { + "epoch": 0.8386055519690123, + "grad_norm": 1.3088273051790296, + "learning_rate": 0.00019484999999999997, + "loss": 2.615, + "step": 1299 + }, + { + "epoch": 0.8392511297611362, + "grad_norm": 1.2229388653123643, + "learning_rate": 0.000195, + "loss": 2.6213, + "step": 1300 + }, + { + "epoch": 0.8398967075532602, + "grad_norm": 1.2802362096441497, + "learning_rate": 0.00019514999999999997, + "loss": 2.4882, + "step": 1301 + }, + { + "epoch": 0.8405422853453841, + "grad_norm": 1.2673934939123568, + "learning_rate": 0.00019529999999999998, + "loss": 2.5201, + "step": 1302 + }, + { + "epoch": 0.841187863137508, + "grad_norm": 1.381371050138024, + "learning_rate": 0.00019544999999999998, + "loss": 2.4324, + "step": 1303 + }, + { + "epoch": 0.841833440929632, + "grad_norm": 1.3828755157892725, + "learning_rate": 0.00019559999999999998, + "loss": 2.6332, + "step": 1304 + }, + { + "epoch": 0.842479018721756, + "grad_norm": 1.2027021677442744, + "learning_rate": 0.00019574999999999996, + "loss": 2.5269, + "step": 1305 + }, + { + "epoch": 0.8431245965138799, + "grad_norm": 
1.305468455444562, + "learning_rate": 0.0001959, + "loss": 2.4337, + "step": 1306 + }, + { + "epoch": 0.8437701743060039, + "grad_norm": 1.2648018721836307, + "learning_rate": 0.00019604999999999997, + "loss": 2.3354, + "step": 1307 + }, + { + "epoch": 0.8444157520981278, + "grad_norm": 1.2234590281755595, + "learning_rate": 0.0001962, + "loss": 2.5377, + "step": 1308 + }, + { + "epoch": 0.8450613298902517, + "grad_norm": 1.3996441344651345, + "learning_rate": 0.00019634999999999998, + "loss": 2.1612, + "step": 1309 + }, + { + "epoch": 0.8457069076823758, + "grad_norm": 1.203806581071574, + "learning_rate": 0.00019649999999999998, + "loss": 2.2878, + "step": 1310 + }, + { + "epoch": 0.8463524854744997, + "grad_norm": 1.3386523017115488, + "learning_rate": 0.00019664999999999998, + "loss": 2.6666, + "step": 1311 + }, + { + "epoch": 0.8469980632666236, + "grad_norm": 1.327878880798953, + "learning_rate": 0.00019679999999999999, + "loss": 2.6256, + "step": 1312 + }, + { + "epoch": 0.8476436410587476, + "grad_norm": 1.6151601001949119, + "learning_rate": 0.00019694999999999996, + "loss": 2.5343, + "step": 1313 + }, + { + "epoch": 0.8482892188508715, + "grad_norm": 1.4577366930234958, + "learning_rate": 0.0001971, + "loss": 2.4125, + "step": 1314 + }, + { + "epoch": 0.8489347966429954, + "grad_norm": 1.2906087718240122, + "learning_rate": 0.00019724999999999997, + "loss": 2.4991, + "step": 1315 + }, + { + "epoch": 0.8495803744351195, + "grad_norm": 1.2493799183430738, + "learning_rate": 0.0001974, + "loss": 2.6816, + "step": 1316 + }, + { + "epoch": 0.8502259522272434, + "grad_norm": 1.4530195974934257, + "learning_rate": 0.00019754999999999998, + "loss": 2.507, + "step": 1317 + }, + { + "epoch": 0.8508715300193673, + "grad_norm": 1.3342553955484557, + "learning_rate": 0.00019769999999999998, + "loss": 2.5971, + "step": 1318 + }, + { + "epoch": 0.8515171078114913, + "grad_norm": 1.2576482523678099, + "learning_rate": 0.00019784999999999998, + "loss": 2.5889, + "step": 1319 + }, + { + "epoch": 0.8521626856036152, + "grad_norm": 1.311497937813287, + "learning_rate": 0.000198, + "loss": 2.6813, + "step": 1320 + }, + { + "epoch": 0.8528082633957392, + "grad_norm": 1.3210222675811198, + "learning_rate": 0.00019814999999999996, + "loss": 2.5464, + "step": 1321 + }, + { + "epoch": 0.8534538411878632, + "grad_norm": 1.1868403517686783, + "learning_rate": 0.0001983, + "loss": 2.5039, + "step": 1322 + }, + { + "epoch": 0.8540994189799871, + "grad_norm": 1.1980565665890777, + "learning_rate": 0.00019844999999999997, + "loss": 2.5235, + "step": 1323 + }, + { + "epoch": 0.854744996772111, + "grad_norm": 1.3888964565920412, + "learning_rate": 0.0001986, + "loss": 2.682, + "step": 1324 + }, + { + "epoch": 0.855390574564235, + "grad_norm": 1.3204813090289367, + "learning_rate": 0.00019874999999999998, + "loss": 2.1857, + "step": 1325 + }, + { + "epoch": 0.856036152356359, + "grad_norm": 1.4434856398075926, + "learning_rate": 0.00019889999999999998, + "loss": 2.6133, + "step": 1326 + }, + { + "epoch": 0.8566817301484829, + "grad_norm": 1.2637734072449232, + "learning_rate": 0.00019905, + "loss": 2.1059, + "step": 1327 + }, + { + "epoch": 0.8573273079406069, + "grad_norm": 1.123784133423751, + "learning_rate": 0.0001992, + "loss": 2.222, + "step": 1328 + }, + { + "epoch": 0.8579728857327308, + "grad_norm": 1.376450960432389, + "learning_rate": 0.00019934999999999997, + "loss": 2.6344, + "step": 1329 + }, + { + "epoch": 0.8586184635248547, + "grad_norm": 1.3132176046606445, + "learning_rate": 0.0001995, + "loss": 
2.4104, + "step": 1330 + }, + { + "epoch": 0.8592640413169786, + "grad_norm": 1.4082020584981443, + "learning_rate": 0.00019964999999999997, + "loss": 2.6345, + "step": 1331 + }, + { + "epoch": 0.8599096191091027, + "grad_norm": 1.5866323354665919, + "learning_rate": 0.0001998, + "loss": 2.5558, + "step": 1332 + }, + { + "epoch": 0.8605551969012266, + "grad_norm": 1.8249780731793932, + "learning_rate": 0.00019994999999999998, + "loss": 2.4716, + "step": 1333 + }, + { + "epoch": 0.8612007746933505, + "grad_norm": 1.5377253542642304, + "learning_rate": 0.00020009999999999998, + "loss": 2.6513, + "step": 1334 + }, + { + "epoch": 0.8618463524854745, + "grad_norm": 1.2661604167835585, + "learning_rate": 0.00020025, + "loss": 2.6147, + "step": 1335 + }, + { + "epoch": 0.8624919302775984, + "grad_norm": 1.3674857835507521, + "learning_rate": 0.0002004, + "loss": 2.5043, + "step": 1336 + }, + { + "epoch": 0.8631375080697224, + "grad_norm": 1.4519060991120742, + "learning_rate": 0.00020054999999999997, + "loss": 2.5002, + "step": 1337 + }, + { + "epoch": 0.8637830858618464, + "grad_norm": 1.559687651494554, + "learning_rate": 0.0002007, + "loss": 2.4595, + "step": 1338 + }, + { + "epoch": 0.8644286636539703, + "grad_norm": 1.2568628700951903, + "learning_rate": 0.00020084999999999998, + "loss": 2.2561, + "step": 1339 + }, + { + "epoch": 0.8650742414460942, + "grad_norm": 1.2610060236878116, + "learning_rate": 0.000201, + "loss": 2.5269, + "step": 1340 + }, + { + "epoch": 0.8657198192382182, + "grad_norm": 1.2831199189308604, + "learning_rate": 0.00020114999999999998, + "loss": 2.4321, + "step": 1341 + }, + { + "epoch": 0.8663653970303422, + "grad_norm": 1.3131956165639895, + "learning_rate": 0.0002013, + "loss": 2.6731, + "step": 1342 + }, + { + "epoch": 0.8670109748224661, + "grad_norm": 1.4659377428443348, + "learning_rate": 0.00020145, + "loss": 2.5632, + "step": 1343 + }, + { + "epoch": 0.8676565526145901, + "grad_norm": 1.611736927362186, + "learning_rate": 0.0002016, + "loss": 2.5311, + "step": 1344 + }, + { + "epoch": 0.868302130406714, + "grad_norm": 1.3136411320419676, + "learning_rate": 0.00020174999999999997, + "loss": 2.6961, + "step": 1345 + }, + { + "epoch": 0.8689477081988379, + "grad_norm": 1.290472903677403, + "learning_rate": 0.0002019, + "loss": 2.1267, + "step": 1346 + }, + { + "epoch": 0.8695932859909619, + "grad_norm": 1.2902035132802807, + "learning_rate": 0.00020204999999999998, + "loss": 2.3489, + "step": 1347 + }, + { + "epoch": 0.8702388637830859, + "grad_norm": 1.5403090784627427, + "learning_rate": 0.0002022, + "loss": 2.7121, + "step": 1348 + }, + { + "epoch": 0.8708844415752098, + "grad_norm": 1.29683409637717, + "learning_rate": 0.00020234999999999999, + "loss": 2.7612, + "step": 1349 + }, + { + "epoch": 0.8715300193673338, + "grad_norm": 1.1926666512088226, + "learning_rate": 0.0002025, + "loss": 2.5297, + "step": 1350 + }, + { + "epoch": 0.8721755971594577, + "grad_norm": 1.187635532816533, + "learning_rate": 0.00020264999999999997, + "loss": 2.3847, + "step": 1351 + }, + { + "epoch": 0.8728211749515816, + "grad_norm": 1.2695255067043254, + "learning_rate": 0.0002028, + "loss": 2.5115, + "step": 1352 + }, + { + "epoch": 0.8734667527437057, + "grad_norm": 1.4052015488613494, + "learning_rate": 0.00020294999999999997, + "loss": 2.0745, + "step": 1353 + }, + { + "epoch": 0.8741123305358296, + "grad_norm": 1.3606331200018469, + "learning_rate": 0.0002031, + "loss": 2.6925, + "step": 1354 + }, + { + "epoch": 0.8747579083279535, + "grad_norm": 1.4797200585710861, + 
"learning_rate": 0.00020324999999999998, + "loss": 2.6619, + "step": 1355 + }, + { + "epoch": 0.8754034861200775, + "grad_norm": 1.4802963164352774, + "learning_rate": 0.00020339999999999998, + "loss": 2.619, + "step": 1356 + }, + { + "epoch": 0.8760490639122014, + "grad_norm": 1.3513122102922788, + "learning_rate": 0.00020355, + "loss": 2.6149, + "step": 1357 + }, + { + "epoch": 0.8766946417043253, + "grad_norm": 1.3646176299435273, + "learning_rate": 0.0002037, + "loss": 2.1784, + "step": 1358 + }, + { + "epoch": 0.8773402194964494, + "grad_norm": 1.2387886527917604, + "learning_rate": 0.00020384999999999997, + "loss": 2.6526, + "step": 1359 + }, + { + "epoch": 0.8779857972885733, + "grad_norm": 1.2057846123908569, + "learning_rate": 0.000204, + "loss": 2.6746, + "step": 1360 + }, + { + "epoch": 0.8786313750806972, + "grad_norm": 1.2614653825812892, + "learning_rate": 0.00020414999999999997, + "loss": 2.6323, + "step": 1361 + }, + { + "epoch": 0.8792769528728211, + "grad_norm": 1.457645355600834, + "learning_rate": 0.0002043, + "loss": 2.6738, + "step": 1362 + }, + { + "epoch": 0.8799225306649451, + "grad_norm": 1.1418750993911648, + "learning_rate": 0.00020444999999999998, + "loss": 2.213, + "step": 1363 + }, + { + "epoch": 0.8805681084570691, + "grad_norm": 1.192982255092795, + "learning_rate": 0.00020459999999999999, + "loss": 2.4479, + "step": 1364 + }, + { + "epoch": 0.881213686249193, + "grad_norm": 1.1516319164858657, + "learning_rate": 0.00020475, + "loss": 2.4095, + "step": 1365 + }, + { + "epoch": 0.881859264041317, + "grad_norm": 1.5030660924727268, + "learning_rate": 0.0002049, + "loss": 2.7249, + "step": 1366 + }, + { + "epoch": 0.8825048418334409, + "grad_norm": 1.3485715288149598, + "learning_rate": 0.00020504999999999997, + "loss": 2.4414, + "step": 1367 + }, + { + "epoch": 0.8831504196255648, + "grad_norm": 1.3696816843216097, + "learning_rate": 0.0002052, + "loss": 2.1949, + "step": 1368 + }, + { + "epoch": 0.8837959974176889, + "grad_norm": 1.2115017835696058, + "learning_rate": 0.00020534999999999998, + "loss": 2.3064, + "step": 1369 + }, + { + "epoch": 0.8844415752098128, + "grad_norm": 1.4160909503981303, + "learning_rate": 0.0002055, + "loss": 2.6365, + "step": 1370 + }, + { + "epoch": 0.8850871530019367, + "grad_norm": 1.370810315878976, + "learning_rate": 0.00020564999999999998, + "loss": 2.6735, + "step": 1371 + }, + { + "epoch": 0.8857327307940607, + "grad_norm": 1.2145484597715142, + "learning_rate": 0.0002058, + "loss": 2.2718, + "step": 1372 + }, + { + "epoch": 0.8863783085861846, + "grad_norm": 1.2998185009199914, + "learning_rate": 0.00020595, + "loss": 2.4577, + "step": 1373 + }, + { + "epoch": 0.8870238863783085, + "grad_norm": 1.3828288622145108, + "learning_rate": 0.0002061, + "loss": 2.5945, + "step": 1374 + }, + { + "epoch": 0.8876694641704326, + "grad_norm": 1.4028661364397033, + "learning_rate": 0.00020624999999999997, + "loss": 2.4537, + "step": 1375 + }, + { + "epoch": 0.8883150419625565, + "grad_norm": 1.229106311102802, + "learning_rate": 0.00020639999999999998, + "loss": 2.5046, + "step": 1376 + }, + { + "epoch": 0.8889606197546804, + "grad_norm": 1.3546889521360337, + "learning_rate": 0.00020654999999999998, + "loss": 2.6828, + "step": 1377 + }, + { + "epoch": 0.8896061975468044, + "grad_norm": 1.1914336194128716, + "learning_rate": 0.00020669999999999996, + "loss": 2.1809, + "step": 1378 + }, + { + "epoch": 0.8902517753389283, + "grad_norm": 1.2532015140045296, + "learning_rate": 0.00020684999999999999, + "loss": 2.2045, + "step": 1379 + }, 
+ { + "epoch": 0.8908973531310523, + "grad_norm": 1.2047262890273738, + "learning_rate": 0.00020699999999999996, + "loss": 2.4884, + "step": 1380 + }, + { + "epoch": 0.8915429309231763, + "grad_norm": 1.5266818010897354, + "learning_rate": 0.00020715, + "loss": 2.3849, + "step": 1381 + }, + { + "epoch": 0.8921885087153002, + "grad_norm": 1.5177871577659856, + "learning_rate": 0.00020729999999999997, + "loss": 2.6994, + "step": 1382 + }, + { + "epoch": 0.8928340865074241, + "grad_norm": 1.2767442109680671, + "learning_rate": 0.00020744999999999997, + "loss": 2.3116, + "step": 1383 + }, + { + "epoch": 0.8934796642995481, + "grad_norm": 2.6458465202563253, + "learning_rate": 0.00020759999999999998, + "loss": 2.4865, + "step": 1384 + }, + { + "epoch": 0.8941252420916721, + "grad_norm": 1.4769387093993904, + "learning_rate": 0.00020774999999999998, + "loss": 2.5206, + "step": 1385 + }, + { + "epoch": 0.894770819883796, + "grad_norm": 1.5052258063015511, + "learning_rate": 0.00020789999999999996, + "loss": 3.0358, + "step": 1386 + }, + { + "epoch": 0.89541639767592, + "grad_norm": 1.4818541675681163, + "learning_rate": 0.00020805, + "loss": 2.6426, + "step": 1387 + }, + { + "epoch": 0.8960619754680439, + "grad_norm": 1.235854218900693, + "learning_rate": 0.00020819999999999996, + "loss": 2.6754, + "step": 1388 + }, + { + "epoch": 0.8967075532601678, + "grad_norm": 1.2813571568094413, + "learning_rate": 0.00020835, + "loss": 2.4408, + "step": 1389 + }, + { + "epoch": 0.8973531310522918, + "grad_norm": 1.4408822242002073, + "learning_rate": 0.00020849999999999997, + "loss": 2.471, + "step": 1390 + }, + { + "epoch": 0.8979987088444158, + "grad_norm": 1.3167770838622421, + "learning_rate": 0.00020864999999999998, + "loss": 2.6473, + "step": 1391 + }, + { + "epoch": 0.8986442866365397, + "grad_norm": 1.1808643311192109, + "learning_rate": 0.00020879999999999998, + "loss": 2.3613, + "step": 1392 + }, + { + "epoch": 0.8992898644286637, + "grad_norm": 1.2972285415623834, + "learning_rate": 0.00020894999999999998, + "loss": 2.5763, + "step": 1393 + }, + { + "epoch": 0.8999354422207876, + "grad_norm": 1.347048830272255, + "learning_rate": 0.00020909999999999996, + "loss": 2.5619, + "step": 1394 + }, + { + "epoch": 0.9005810200129115, + "grad_norm": 1.3151142740183441, + "learning_rate": 0.00020925, + "loss": 2.5543, + "step": 1395 + }, + { + "epoch": 0.9012265978050356, + "grad_norm": 1.3619579566366553, + "learning_rate": 0.00020939999999999997, + "loss": 2.5839, + "step": 1396 + }, + { + "epoch": 0.9018721755971595, + "grad_norm": 1.4452618727944928, + "learning_rate": 0.00020955, + "loss": 2.5308, + "step": 1397 + }, + { + "epoch": 0.9025177533892834, + "grad_norm": 1.2454796200055125, + "learning_rate": 0.00020969999999999997, + "loss": 2.3085, + "step": 1398 + }, + { + "epoch": 0.9031633311814073, + "grad_norm": 1.1384775798221394, + "learning_rate": 0.00020984999999999998, + "loss": 2.1089, + "step": 1399 + }, + { + "epoch": 0.9038089089735313, + "grad_norm": 1.2222871683505017, + "learning_rate": 0.00020999999999999998, + "loss": 2.2295, + "step": 1400 + }, + { + "epoch": 0.9044544867656552, + "grad_norm": 1.274566569948348, + "learning_rate": 0.00021014999999999999, + "loss": 2.5651, + "step": 1401 + }, + { + "epoch": 0.9051000645577792, + "grad_norm": 1.2907426241391167, + "learning_rate": 0.00021029999999999996, + "loss": 2.6847, + "step": 1402 + }, + { + "epoch": 0.9057456423499032, + "grad_norm": 1.763634197185907, + "learning_rate": 0.00021045, + "loss": 2.6914, + "step": 1403 + }, + { + 
"epoch": 0.9063912201420271, + "grad_norm": 1.2347146475291992, + "learning_rate": 0.00021059999999999997, + "loss": 2.4414, + "step": 1404 + }, + { + "epoch": 0.907036797934151, + "grad_norm": 1.3346849379420744, + "learning_rate": 0.00021074999999999997, + "loss": 2.7341, + "step": 1405 + }, + { + "epoch": 0.907682375726275, + "grad_norm": 1.232560292704342, + "learning_rate": 0.00021089999999999998, + "loss": 2.531, + "step": 1406 + }, + { + "epoch": 0.908327953518399, + "grad_norm": 1.348708440430082, + "learning_rate": 0.00021104999999999998, + "loss": 2.4484, + "step": 1407 + }, + { + "epoch": 0.9089735313105229, + "grad_norm": 1.1518221284586136, + "learning_rate": 0.00021119999999999996, + "loss": 2.3915, + "step": 1408 + }, + { + "epoch": 0.9096191091026469, + "grad_norm": 1.1963211610636646, + "learning_rate": 0.00021135, + "loss": 2.6014, + "step": 1409 + }, + { + "epoch": 0.9102646868947708, + "grad_norm": 1.251020547193157, + "learning_rate": 0.00021149999999999996, + "loss": 2.4813, + "step": 1410 + }, + { + "epoch": 0.9109102646868947, + "grad_norm": 1.345308108284654, + "learning_rate": 0.00021165, + "loss": 2.2044, + "step": 1411 + }, + { + "epoch": 0.9115558424790188, + "grad_norm": 1.2004158750893017, + "learning_rate": 0.00021179999999999997, + "loss": 2.0347, + "step": 1412 + }, + { + "epoch": 0.9122014202711427, + "grad_norm": 1.1785877582441506, + "learning_rate": 0.00021194999999999997, + "loss": 2.4814, + "step": 1413 + }, + { + "epoch": 0.9128469980632666, + "grad_norm": 1.3039275417844283, + "learning_rate": 0.00021209999999999998, + "loss": 2.4543, + "step": 1414 + }, + { + "epoch": 0.9134925758553906, + "grad_norm": 1.3687073411952864, + "learning_rate": 0.00021224999999999998, + "loss": 2.5365, + "step": 1415 + }, + { + "epoch": 0.9141381536475145, + "grad_norm": 1.4936367783653262, + "learning_rate": 0.00021239999999999996, + "loss": 2.4122, + "step": 1416 + }, + { + "epoch": 0.9147837314396384, + "grad_norm": 1.3712902994126506, + "learning_rate": 0.00021255, + "loss": 2.3866, + "step": 1417 + }, + { + "epoch": 0.9154293092317625, + "grad_norm": 1.3379556606766119, + "learning_rate": 0.00021269999999999997, + "loss": 2.671, + "step": 1418 + }, + { + "epoch": 0.9160748870238864, + "grad_norm": 1.3593976454681052, + "learning_rate": 0.00021285, + "loss": 2.4192, + "step": 1419 + }, + { + "epoch": 0.9167204648160103, + "grad_norm": 1.2246212462781694, + "learning_rate": 0.00021299999999999997, + "loss": 2.3995, + "step": 1420 + }, + { + "epoch": 0.9173660426081343, + "grad_norm": 1.2854389426125123, + "learning_rate": 0.00021314999999999998, + "loss": 2.5693, + "step": 1421 + }, + { + "epoch": 0.9180116204002582, + "grad_norm": 1.2966830198088743, + "learning_rate": 0.00021329999999999998, + "loss": 2.4497, + "step": 1422 + }, + { + "epoch": 0.9186571981923822, + "grad_norm": 1.4661489291749894, + "learning_rate": 0.00021344999999999998, + "loss": 2.6662, + "step": 1423 + }, + { + "epoch": 0.9193027759845062, + "grad_norm": 1.119003446390411, + "learning_rate": 0.00021359999999999996, + "loss": 2.3845, + "step": 1424 + }, + { + "epoch": 0.9199483537766301, + "grad_norm": 1.2936295886398927, + "learning_rate": 0.00021375, + "loss": 2.6428, + "step": 1425 + }, + { + "epoch": 0.920593931568754, + "grad_norm": 1.271745861394347, + "learning_rate": 0.00021389999999999997, + "loss": 2.6625, + "step": 1426 + }, + { + "epoch": 0.921239509360878, + "grad_norm": 1.2903192860886012, + "learning_rate": 0.00021405, + "loss": 2.382, + "step": 1427 + }, + { + "epoch": 
0.921885087153002, + "grad_norm": 1.3294082981950228, + "learning_rate": 0.00021419999999999998, + "loss": 2.5663, + "step": 1428 + }, + { + "epoch": 0.9225306649451259, + "grad_norm": 1.953447482910483, + "learning_rate": 0.00021434999999999998, + "loss": 2.5314, + "step": 1429 + }, + { + "epoch": 0.9231762427372499, + "grad_norm": 1.1458594883868374, + "learning_rate": 0.00021449999999999998, + "loss": 2.2511, + "step": 1430 + }, + { + "epoch": 0.9238218205293738, + "grad_norm": 1.249728147541206, + "learning_rate": 0.00021464999999999999, + "loss": 2.6422, + "step": 1431 + }, + { + "epoch": 0.9244673983214977, + "grad_norm": 1.2002097484595597, + "learning_rate": 0.00021479999999999996, + "loss": 2.4453, + "step": 1432 + }, + { + "epoch": 0.9251129761136216, + "grad_norm": 1.1994713837176754, + "learning_rate": 0.00021495, + "loss": 2.5155, + "step": 1433 + }, + { + "epoch": 0.9257585539057457, + "grad_norm": 2.207892518984347, + "learning_rate": 0.00021509999999999997, + "loss": 2.176, + "step": 1434 + }, + { + "epoch": 0.9264041316978696, + "grad_norm": 1.2675773989196388, + "learning_rate": 0.00021525, + "loss": 2.5181, + "step": 1435 + }, + { + "epoch": 0.9270497094899935, + "grad_norm": 1.2336860381032533, + "learning_rate": 0.00021539999999999998, + "loss": 2.109, + "step": 1436 + }, + { + "epoch": 0.9276952872821175, + "grad_norm": 1.4845022608079952, + "learning_rate": 0.00021554999999999998, + "loss": 2.4704, + "step": 1437 + }, + { + "epoch": 0.9283408650742414, + "grad_norm": 1.1305958974103514, + "learning_rate": 0.00021569999999999998, + "loss": 2.1671, + "step": 1438 + }, + { + "epoch": 0.9289864428663654, + "grad_norm": 1.2402241289276434, + "learning_rate": 0.00021585, + "loss": 2.3986, + "step": 1439 + }, + { + "epoch": 0.9296320206584894, + "grad_norm": 1.3930018004576703, + "learning_rate": 0.00021599999999999996, + "loss": 2.3875, + "step": 1440 + }, + { + "epoch": 0.9302775984506133, + "grad_norm": 1.451882539133718, + "learning_rate": 0.00021615, + "loss": 2.5193, + "step": 1441 + }, + { + "epoch": 0.9309231762427372, + "grad_norm": 1.3512715239672752, + "learning_rate": 0.00021629999999999997, + "loss": 2.5185, + "step": 1442 + }, + { + "epoch": 0.9315687540348612, + "grad_norm": 1.127326809653529, + "learning_rate": 0.00021645, + "loss": 2.522, + "step": 1443 + }, + { + "epoch": 0.9322143318269851, + "grad_norm": 1.330354068265479, + "learning_rate": 0.00021659999999999998, + "loss": 2.8083, + "step": 1444 + }, + { + "epoch": 0.9328599096191091, + "grad_norm": 1.343860333641666, + "learning_rate": 0.00021674999999999998, + "loss": 2.4376, + "step": 1445 + }, + { + "epoch": 0.9335054874112331, + "grad_norm": 1.4174079563738173, + "learning_rate": 0.0002169, + "loss": 2.0787, + "step": 1446 + }, + { + "epoch": 0.934151065203357, + "grad_norm": 1.2267027635119827, + "learning_rate": 0.00021705, + "loss": 2.631, + "step": 1447 + }, + { + "epoch": 0.9347966429954809, + "grad_norm": 1.2962279683610278, + "learning_rate": 0.00021719999999999997, + "loss": 2.5103, + "step": 1448 + }, + { + "epoch": 0.9354422207876049, + "grad_norm": 1.5198069996759231, + "learning_rate": 0.00021735, + "loss": 2.5859, + "step": 1449 + }, + { + "epoch": 0.9360877985797289, + "grad_norm": 1.2406816934726037, + "learning_rate": 0.00021749999999999997, + "loss": 2.3031, + "step": 1450 + }, + { + "epoch": 0.9367333763718528, + "grad_norm": 1.2779164143210144, + "learning_rate": 0.00021764999999999998, + "loss": 2.4592, + "step": 1451 + }, + { + "epoch": 0.9373789541639768, + "grad_norm": 
1.4232048790935428, + "learning_rate": 0.00021779999999999998, + "loss": 2.6191, + "step": 1452 + }, + { + "epoch": 0.9380245319561007, + "grad_norm": 1.3043321905742051, + "learning_rate": 0.00021794999999999999, + "loss": 2.5692, + "step": 1453 + }, + { + "epoch": 0.9386701097482246, + "grad_norm": 1.2636195555374692, + "learning_rate": 0.00021809999999999996, + "loss": 2.5139, + "step": 1454 + }, + { + "epoch": 0.9393156875403487, + "grad_norm": 1.332158146401949, + "learning_rate": 0.00021825, + "loss": 2.7699, + "step": 1455 + }, + { + "epoch": 0.9399612653324726, + "grad_norm": 1.313449347680927, + "learning_rate": 0.00021839999999999997, + "loss": 2.5805, + "step": 1456 + }, + { + "epoch": 0.9406068431245965, + "grad_norm": 1.5104728516586363, + "learning_rate": 0.00021855, + "loss": 2.599, + "step": 1457 + }, + { + "epoch": 0.9412524209167205, + "grad_norm": 1.2085854300310808, + "learning_rate": 0.00021869999999999998, + "loss": 2.4261, + "step": 1458 + }, + { + "epoch": 0.9418979987088444, + "grad_norm": 1.3585036352005884, + "learning_rate": 0.00021884999999999998, + "loss": 2.2047, + "step": 1459 + }, + { + "epoch": 0.9425435765009683, + "grad_norm": 1.388929457402109, + "learning_rate": 0.00021899999999999998, + "loss": 2.6751, + "step": 1460 + }, + { + "epoch": 0.9431891542930924, + "grad_norm": 1.4193287947238276, + "learning_rate": 0.00021915, + "loss": 2.7121, + "step": 1461 + }, + { + "epoch": 0.9438347320852163, + "grad_norm": 1.3007108335017583, + "learning_rate": 0.00021929999999999996, + "loss": 2.6621, + "step": 1462 + }, + { + "epoch": 0.9444803098773402, + "grad_norm": 1.2808784778700761, + "learning_rate": 0.00021945, + "loss": 2.0502, + "step": 1463 + }, + { + "epoch": 0.9451258876694641, + "grad_norm": 1.4638990879742066, + "learning_rate": 0.00021959999999999997, + "loss": 2.4966, + "step": 1464 + }, + { + "epoch": 0.9457714654615881, + "grad_norm": 1.28422397545431, + "learning_rate": 0.00021975, + "loss": 2.2229, + "step": 1465 + }, + { + "epoch": 0.9464170432537121, + "grad_norm": 1.3207958268605686, + "learning_rate": 0.00021989999999999998, + "loss": 2.6012, + "step": 1466 + }, + { + "epoch": 0.947062621045836, + "grad_norm": 1.4860339411168015, + "learning_rate": 0.00022004999999999998, + "loss": 2.7801, + "step": 1467 + }, + { + "epoch": 0.94770819883796, + "grad_norm": 1.5844169061586073, + "learning_rate": 0.00022019999999999999, + "loss": 2.7003, + "step": 1468 + }, + { + "epoch": 0.9483537766300839, + "grad_norm": 1.217811532567045, + "learning_rate": 0.00022035, + "loss": 2.6427, + "step": 1469 + }, + { + "epoch": 0.9489993544222078, + "grad_norm": 1.467186446465158, + "learning_rate": 0.00022049999999999997, + "loss": 2.6331, + "step": 1470 + }, + { + "epoch": 0.9496449322143318, + "grad_norm": 1.3883507141858997, + "learning_rate": 0.00022065, + "loss": 1.9957, + "step": 1471 + }, + { + "epoch": 0.9502905100064558, + "grad_norm": 1.2323369357741094, + "learning_rate": 0.00022079999999999997, + "loss": 2.6273, + "step": 1472 + }, + { + "epoch": 0.9509360877985797, + "grad_norm": 1.3032124591166931, + "learning_rate": 0.00022095, + "loss": 2.1241, + "step": 1473 + }, + { + "epoch": 0.9515816655907037, + "grad_norm": 1.4177393938595189, + "learning_rate": 0.00022109999999999998, + "loss": 2.5043, + "step": 1474 + }, + { + "epoch": 0.9522272433828276, + "grad_norm": 1.273614580337311, + "learning_rate": 0.00022124999999999998, + "loss": 2.646, + "step": 1475 + }, + { + "epoch": 0.9528728211749515, + "grad_norm": 1.3658527252238892, + "learning_rate": 
0.0002214, + "loss": 2.6418, + "step": 1476 + }, + { + "epoch": 0.9535183989670756, + "grad_norm": 1.4973572365266261, + "learning_rate": 0.00022155, + "loss": 2.619, + "step": 1477 + }, + { + "epoch": 0.9541639767591995, + "grad_norm": 1.4249889376398623, + "learning_rate": 0.00022169999999999997, + "loss": 2.5151, + "step": 1478 + }, + { + "epoch": 0.9548095545513234, + "grad_norm": 1.3105043023872989, + "learning_rate": 0.00022185, + "loss": 2.4697, + "step": 1479 + }, + { + "epoch": 0.9554551323434474, + "grad_norm": 1.1679655776449327, + "learning_rate": 0.00022199999999999998, + "loss": 2.3605, + "step": 1480 + }, + { + "epoch": 0.9561007101355713, + "grad_norm": 1.3903707370587508, + "learning_rate": 0.00022215, + "loss": 2.7616, + "step": 1481 + }, + { + "epoch": 0.9567462879276953, + "grad_norm": 1.1710170810718963, + "learning_rate": 0.00022229999999999998, + "loss": 2.546, + "step": 1482 + }, + { + "epoch": 0.9573918657198193, + "grad_norm": 1.4945384050442287, + "learning_rate": 0.00022244999999999999, + "loss": 2.5461, + "step": 1483 + }, + { + "epoch": 0.9580374435119432, + "grad_norm": 1.2001596775107444, + "learning_rate": 0.0002226, + "loss": 2.4912, + "step": 1484 + }, + { + "epoch": 0.9586830213040671, + "grad_norm": 1.239676183775333, + "learning_rate": 0.00022275, + "loss": 2.5275, + "step": 1485 + }, + { + "epoch": 0.9593285990961911, + "grad_norm": 1.1624451192139726, + "learning_rate": 0.00022289999999999997, + "loss": 2.0828, + "step": 1486 + }, + { + "epoch": 0.959974176888315, + "grad_norm": 1.220971118347415, + "learning_rate": 0.00022305, + "loss": 2.5636, + "step": 1487 + }, + { + "epoch": 0.960619754680439, + "grad_norm": 1.2352292371682563, + "learning_rate": 0.00022319999999999998, + "loss": 2.4466, + "step": 1488 + }, + { + "epoch": 0.961265332472563, + "grad_norm": 1.1762936091960001, + "learning_rate": 0.00022335, + "loss": 2.5635, + "step": 1489 + }, + { + "epoch": 0.9619109102646869, + "grad_norm": 1.355560763081342, + "learning_rate": 0.00022349999999999998, + "loss": 2.8315, + "step": 1490 + }, + { + "epoch": 0.9625564880568108, + "grad_norm": 1.1373475368254238, + "learning_rate": 0.00022365, + "loss": 2.4245, + "step": 1491 + }, + { + "epoch": 0.9632020658489348, + "grad_norm": 1.316685440784585, + "learning_rate": 0.0002238, + "loss": 2.4459, + "step": 1492 + }, + { + "epoch": 0.9638476436410588, + "grad_norm": 1.479085380912115, + "learning_rate": 0.00022395, + "loss": 2.5186, + "step": 1493 + }, + { + "epoch": 0.9644932214331827, + "grad_norm": 1.2634186644631065, + "learning_rate": 0.00022409999999999997, + "loss": 1.9775, + "step": 1494 + }, + { + "epoch": 0.9651387992253067, + "grad_norm": 1.256844644711922, + "learning_rate": 0.00022425, + "loss": 2.375, + "step": 1495 + }, + { + "epoch": 0.9657843770174306, + "grad_norm": 1.6807223987010902, + "learning_rate": 0.00022439999999999998, + "loss": 2.92, + "step": 1496 + }, + { + "epoch": 0.9664299548095545, + "grad_norm": 1.1811970038640989, + "learning_rate": 0.00022455, + "loss": 2.1023, + "step": 1497 + }, + { + "epoch": 0.9670755326016786, + "grad_norm": 1.3336309231974903, + "learning_rate": 0.0002247, + "loss": 2.6067, + "step": 1498 + }, + { + "epoch": 0.9677211103938025, + "grad_norm": 1.1910362849777216, + "learning_rate": 0.00022485, + "loss": 2.021, + "step": 1499 + }, + { + "epoch": 0.9683666881859264, + "grad_norm": 1.2722526073176672, + "learning_rate": 0.000225, + "loss": 2.586, + "step": 1500 + }, + { + "epoch": 0.9690122659780503, + "grad_norm": 1.2204023013458956, + 
"learning_rate": 0.00022514999999999997, + "loss": 2.5844, + "step": 1501 + }, + { + "epoch": 0.9696578437701743, + "grad_norm": 1.2247163812311224, + "learning_rate": 0.00022529999999999997, + "loss": 2.6475, + "step": 1502 + }, + { + "epoch": 0.9703034215622982, + "grad_norm": 1.2441403025315123, + "learning_rate": 0.00022544999999999995, + "loss": 2.5347, + "step": 1503 + }, + { + "epoch": 0.9709489993544222, + "grad_norm": 1.190965059862136, + "learning_rate": 0.00022559999999999998, + "loss": 2.5548, + "step": 1504 + }, + { + "epoch": 0.9715945771465462, + "grad_norm": 1.2027374476940804, + "learning_rate": 0.00022574999999999996, + "loss": 2.4684, + "step": 1505 + }, + { + "epoch": 0.9722401549386701, + "grad_norm": 1.296135320839983, + "learning_rate": 0.0002259, + "loss": 2.5079, + "step": 1506 + }, + { + "epoch": 0.972885732730794, + "grad_norm": 1.1182468304290716, + "learning_rate": 0.00022604999999999997, + "loss": 2.1141, + "step": 1507 + }, + { + "epoch": 0.973531310522918, + "grad_norm": 1.1948563044456157, + "learning_rate": 0.00022619999999999997, + "loss": 2.4059, + "step": 1508 + }, + { + "epoch": 0.974176888315042, + "grad_norm": 1.141870694690002, + "learning_rate": 0.00022634999999999997, + "loss": 2.3716, + "step": 1509 + }, + { + "epoch": 0.9748224661071659, + "grad_norm": 1.2071528652876984, + "learning_rate": 0.00022649999999999998, + "loss": 2.4912, + "step": 1510 + }, + { + "epoch": 0.9754680438992899, + "grad_norm": 1.2876357713468034, + "learning_rate": 0.00022664999999999995, + "loss": 2.5004, + "step": 1511 + }, + { + "epoch": 0.9761136216914138, + "grad_norm": 1.2038584323875487, + "learning_rate": 0.00022679999999999998, + "loss": 2.4526, + "step": 1512 + }, + { + "epoch": 0.9767591994835377, + "grad_norm": 1.2037115582739046, + "learning_rate": 0.00022694999999999996, + "loss": 2.1208, + "step": 1513 + }, + { + "epoch": 0.9774047772756617, + "grad_norm": 1.2242228042256524, + "learning_rate": 0.0002271, + "loss": 1.9917, + "step": 1514 + }, + { + "epoch": 0.9780503550677857, + "grad_norm": 1.296020544920621, + "learning_rate": 0.00022724999999999997, + "loss": 2.3116, + "step": 1515 + }, + { + "epoch": 0.9786959328599096, + "grad_norm": 1.435184390750714, + "learning_rate": 0.00022739999999999997, + "loss": 2.6639, + "step": 1516 + }, + { + "epoch": 0.9793415106520336, + "grad_norm": 1.192862787629436, + "learning_rate": 0.00022754999999999997, + "loss": 2.6061, + "step": 1517 + }, + { + "epoch": 0.9799870884441575, + "grad_norm": 1.1921323260110759, + "learning_rate": 0.00022769999999999998, + "loss": 2.3773, + "step": 1518 + }, + { + "epoch": 0.9806326662362814, + "grad_norm": 1.1878732505054932, + "learning_rate": 0.00022784999999999995, + "loss": 2.3536, + "step": 1519 + }, + { + "epoch": 0.9812782440284055, + "grad_norm": 1.1905143954920752, + "learning_rate": 0.00022799999999999999, + "loss": 2.5022, + "step": 1520 + }, + { + "epoch": 0.9819238218205294, + "grad_norm": 1.2824078277645823, + "learning_rate": 0.00022814999999999996, + "loss": 2.6951, + "step": 1521 + }, + { + "epoch": 0.9825693996126533, + "grad_norm": 1.3280416898033076, + "learning_rate": 0.0002283, + "loss": 2.8294, + "step": 1522 + }, + { + "epoch": 0.9832149774047773, + "grad_norm": 1.4102263321036566, + "learning_rate": 0.00022844999999999997, + "loss": 2.5275, + "step": 1523 + }, + { + "epoch": 0.9838605551969012, + "grad_norm": 1.2463983507053793, + "learning_rate": 0.00022859999999999997, + "loss": 2.7615, + "step": 1524 + }, + { + "epoch": 0.9845061329890252, + "grad_norm": 
1.11245218823729, + "learning_rate": 0.00022874999999999998, + "loss": 2.2574, + "step": 1525 + }, + { + "epoch": 0.9851517107811492, + "grad_norm": 1.4896312888725796, + "learning_rate": 0.00022889999999999998, + "loss": 2.3803, + "step": 1526 + }, + { + "epoch": 0.9857972885732731, + "grad_norm": 1.2623468566860343, + "learning_rate": 0.00022904999999999996, + "loss": 2.5536, + "step": 1527 + }, + { + "epoch": 0.986442866365397, + "grad_norm": 1.2056402035232028, + "learning_rate": 0.0002292, + "loss": 2.4631, + "step": 1528 + }, + { + "epoch": 0.987088444157521, + "grad_norm": 1.2726280678400756, + "learning_rate": 0.00022934999999999996, + "loss": 2.3478, + "step": 1529 + }, + { + "epoch": 0.9877340219496449, + "grad_norm": 1.173912010931186, + "learning_rate": 0.0002295, + "loss": 2.3473, + "step": 1530 + }, + { + "epoch": 0.9883795997417689, + "grad_norm": 1.2352713509275242, + "learning_rate": 0.00022964999999999997, + "loss": 2.4409, + "step": 1531 + }, + { + "epoch": 0.9890251775338929, + "grad_norm": 1.2244135887219842, + "learning_rate": 0.00022979999999999997, + "loss": 2.3862, + "step": 1532 + }, + { + "epoch": 0.9896707553260168, + "grad_norm": 1.2995471779786556, + "learning_rate": 0.00022994999999999998, + "loss": 2.5415, + "step": 1533 + }, + { + "epoch": 0.9903163331181407, + "grad_norm": 1.2232664919679932, + "learning_rate": 0.00023009999999999998, + "loss": 2.5066, + "step": 1534 + }, + { + "epoch": 0.9909619109102646, + "grad_norm": 1.5481951242449348, + "learning_rate": 0.00023024999999999996, + "loss": 2.3285, + "step": 1535 + }, + { + "epoch": 0.9916074887023887, + "grad_norm": 1.244656299207581, + "learning_rate": 0.0002304, + "loss": 2.3815, + "step": 1536 + }, + { + "epoch": 0.9922530664945126, + "grad_norm": 1.14045903082428, + "learning_rate": 0.00023054999999999997, + "loss": 2.113, + "step": 1537 + }, + { + "epoch": 0.9928986442866365, + "grad_norm": 1.348611560925763, + "learning_rate": 0.0002307, + "loss": 2.4303, + "step": 1538 + }, + { + "epoch": 0.9935442220787605, + "grad_norm": 1.2409839834115757, + "learning_rate": 0.00023084999999999997, + "loss": 2.666, + "step": 1539 + }, + { + "epoch": 0.9941897998708844, + "grad_norm": 1.1750812420359968, + "learning_rate": 0.00023099999999999998, + "loss": 2.0853, + "step": 1540 + }, + { + "epoch": 0.9948353776630084, + "grad_norm": 1.1575159108117288, + "learning_rate": 0.00023114999999999998, + "loss": 2.4074, + "step": 1541 + }, + { + "epoch": 0.9954809554551324, + "grad_norm": 1.1485351964728168, + "learning_rate": 0.00023129999999999998, + "loss": 2.4628, + "step": 1542 + }, + { + "epoch": 0.9961265332472563, + "grad_norm": 1.184723255893178, + "learning_rate": 0.00023144999999999996, + "loss": 2.4224, + "step": 1543 + }, + { + "epoch": 0.9967721110393802, + "grad_norm": 1.1724761767347285, + "learning_rate": 0.0002316, + "loss": 2.4394, + "step": 1544 + }, + { + "epoch": 0.9974176888315042, + "grad_norm": 1.2339916370567938, + "learning_rate": 0.00023174999999999997, + "loss": 2.5936, + "step": 1545 + }, + { + "epoch": 0.9980632666236281, + "grad_norm": 1.212673968330058, + "learning_rate": 0.0002319, + "loss": 2.4926, + "step": 1546 + }, + { + "epoch": 0.9987088444157521, + "grad_norm": 1.1559527983975704, + "learning_rate": 0.00023204999999999998, + "loss": 2.3701, + "step": 1547 + }, + { + "epoch": 0.9993544222078761, + "grad_norm": 1.1489878241930926, + "learning_rate": 0.00023219999999999998, + "loss": 2.3856, + "step": 1548 + }, + { + "epoch": 1.0, + "grad_norm": 1.1166001616790946, + 
"learning_rate": 0.00023234999999999998, + "loss": 2.4008, + "step": 1549 + }, + { + "epoch": 1.0, + "eval_loss": 2.4034976959228516, + "eval_runtime": 58.5993, + "eval_samples_per_second": 5.922, + "eval_steps_per_second": 5.922, + "step": 1549 + }, + { + "epoch": 1.000645577792124, + "grad_norm": 1.2177308116882508, + "learning_rate": 0.00023249999999999999, + "loss": 2.1143, + "step": 1550 + }, + { + "epoch": 1.0012911555842479, + "grad_norm": 1.3063982958676799, + "learning_rate": 0.00023264999999999996, + "loss": 2.4855, + "step": 1551 + }, + { + "epoch": 1.0019367333763718, + "grad_norm": 1.2624745641939024, + "learning_rate": 0.0002328, + "loss": 2.5745, + "step": 1552 + }, + { + "epoch": 1.0025823111684957, + "grad_norm": 1.2590922740240609, + "learning_rate": 0.00023294999999999997, + "loss": 2.4783, + "step": 1553 + }, + { + "epoch": 1.0032278889606197, + "grad_norm": 1.2895558809540928, + "learning_rate": 0.00023309999999999997, + "loss": 2.5223, + "step": 1554 + }, + { + "epoch": 1.0038734667527438, + "grad_norm": 1.272896618113426, + "learning_rate": 0.00023324999999999998, + "loss": 2.6953, + "step": 1555 + }, + { + "epoch": 1.0045190445448677, + "grad_norm": 1.111974465238009, + "learning_rate": 0.00023339999999999998, + "loss": 2.4062, + "step": 1556 + }, + { + "epoch": 1.0051646223369917, + "grad_norm": 1.169895042811319, + "learning_rate": 0.00023354999999999996, + "loss": 1.9895, + "step": 1557 + }, + { + "epoch": 1.0058102001291156, + "grad_norm": 1.2161015489044618, + "learning_rate": 0.0002337, + "loss": 2.6037, + "step": 1558 + }, + { + "epoch": 1.0064557779212395, + "grad_norm": 1.1873387667600857, + "learning_rate": 0.00023384999999999997, + "loss": 2.4775, + "step": 1559 + }, + { + "epoch": 1.0071013557133635, + "grad_norm": 1.192874438495477, + "learning_rate": 0.000234, + "loss": 2.4906, + "step": 1560 + }, + { + "epoch": 1.0077469335054874, + "grad_norm": 1.174280401774705, + "learning_rate": 0.00023414999999999997, + "loss": 2.1802, + "step": 1561 + }, + { + "epoch": 1.0083925112976113, + "grad_norm": 1.2120699133707253, + "learning_rate": 0.00023429999999999998, + "loss": 2.5631, + "step": 1562 + }, + { + "epoch": 1.0090380890897352, + "grad_norm": 1.2348730396303527, + "learning_rate": 0.00023444999999999998, + "loss": 2.3311, + "step": 1563 + }, + { + "epoch": 1.0096836668818592, + "grad_norm": 1.350834038605271, + "learning_rate": 0.00023459999999999998, + "loss": 2.5539, + "step": 1564 + }, + { + "epoch": 1.010329244673983, + "grad_norm": 1.2904061534522924, + "learning_rate": 0.00023474999999999996, + "loss": 2.4989, + "step": 1565 + }, + { + "epoch": 1.0109748224661073, + "grad_norm": 1.1835455572203535, + "learning_rate": 0.0002349, + "loss": 2.4299, + "step": 1566 + }, + { + "epoch": 1.0116204002582312, + "grad_norm": 1.3107832076792507, + "learning_rate": 0.00023504999999999997, + "loss": 2.5962, + "step": 1567 + }, + { + "epoch": 1.0122659780503551, + "grad_norm": 1.2646867938103157, + "learning_rate": 0.0002352, + "loss": 2.2104, + "step": 1568 + }, + { + "epoch": 1.012911555842479, + "grad_norm": 1.1851651709863023, + "learning_rate": 0.00023534999999999997, + "loss": 2.1021, + "step": 1569 + }, + { + "epoch": 1.013557133634603, + "grad_norm": 1.3019246952566743, + "learning_rate": 0.00023549999999999998, + "loss": 2.4291, + "step": 1570 + }, + { + "epoch": 1.014202711426727, + "grad_norm": 1.1825474736877033, + "learning_rate": 0.00023564999999999998, + "loss": 2.0492, + "step": 1571 + }, + { + "epoch": 1.0148482892188508, + "grad_norm": 
1.2971365758690483, + "learning_rate": 0.00023579999999999999, + "loss": 2.611, + "step": 1572 + }, + { + "epoch": 1.0154938670109748, + "grad_norm": 1.2679256323406687, + "learning_rate": 0.00023594999999999996, + "loss": 2.3544, + "step": 1573 + }, + { + "epoch": 1.0161394448030987, + "grad_norm": 1.3246614507586558, + "learning_rate": 0.0002361, + "loss": 2.3892, + "step": 1574 + }, + { + "epoch": 1.0167850225952226, + "grad_norm": 1.25491212747976, + "learning_rate": 0.00023624999999999997, + "loss": 2.3807, + "step": 1575 + }, + { + "epoch": 1.0174306003873468, + "grad_norm": 1.3392470952653277, + "learning_rate": 0.0002364, + "loss": 2.4523, + "step": 1576 + }, + { + "epoch": 1.0180761781794707, + "grad_norm": 1.2867349658574854, + "learning_rate": 0.00023654999999999998, + "loss": 2.1494, + "step": 1577 + }, + { + "epoch": 1.0187217559715946, + "grad_norm": 1.4399166300797452, + "learning_rate": 0.00023669999999999998, + "loss": 2.5072, + "step": 1578 + }, + { + "epoch": 1.0193673337637186, + "grad_norm": 1.3495009523345374, + "learning_rate": 0.00023684999999999998, + "loss": 2.5949, + "step": 1579 + }, + { + "epoch": 1.0200129115558425, + "grad_norm": 1.3405900317202537, + "learning_rate": 0.000237, + "loss": 2.4663, + "step": 1580 + }, + { + "epoch": 1.0206584893479664, + "grad_norm": 1.2686489514547805, + "learning_rate": 0.00023714999999999996, + "loss": 2.4357, + "step": 1581 + }, + { + "epoch": 1.0213040671400904, + "grad_norm": 1.3384979209406649, + "learning_rate": 0.0002373, + "loss": 2.5394, + "step": 1582 + }, + { + "epoch": 1.0219496449322143, + "grad_norm": 1.3309026800051722, + "learning_rate": 0.00023744999999999997, + "loss": 2.5284, + "step": 1583 + }, + { + "epoch": 1.0225952227243382, + "grad_norm": 1.3359158009734435, + "learning_rate": 0.0002376, + "loss": 2.7491, + "step": 1584 + }, + { + "epoch": 1.0232408005164622, + "grad_norm": 1.3006034590175484, + "learning_rate": 0.00023774999999999998, + "loss": 2.6252, + "step": 1585 + }, + { + "epoch": 1.023886378308586, + "grad_norm": 1.3547220106848634, + "learning_rate": 0.00023789999999999998, + "loss": 2.5308, + "step": 1586 + }, + { + "epoch": 1.0245319561007102, + "grad_norm": 1.1964552242772577, + "learning_rate": 0.00023804999999999999, + "loss": 2.4067, + "step": 1587 + }, + { + "epoch": 1.0251775338928342, + "grad_norm": 1.2289741707542923, + "learning_rate": 0.0002382, + "loss": 2.4032, + "step": 1588 + }, + { + "epoch": 1.025823111684958, + "grad_norm": 1.1959881377015387, + "learning_rate": 0.00023834999999999997, + "loss": 2.557, + "step": 1589 + }, + { + "epoch": 1.026468689477082, + "grad_norm": 1.3389131331505706, + "learning_rate": 0.0002385, + "loss": 2.6245, + "step": 1590 + }, + { + "epoch": 1.027114267269206, + "grad_norm": 1.517390291404928, + "learning_rate": 0.00023864999999999997, + "loss": 2.6314, + "step": 1591 + }, + { + "epoch": 1.02775984506133, + "grad_norm": 1.168466051946874, + "learning_rate": 0.0002388, + "loss": 2.1539, + "step": 1592 + }, + { + "epoch": 1.0284054228534538, + "grad_norm": 1.3915164145609975, + "learning_rate": 0.00023894999999999998, + "loss": 2.309, + "step": 1593 + }, + { + "epoch": 1.0290510006455778, + "grad_norm": 1.1974019485304477, + "learning_rate": 0.00023909999999999998, + "loss": 1.9977, + "step": 1594 + }, + { + "epoch": 1.0296965784377017, + "grad_norm": 1.3754167353425164, + "learning_rate": 0.00023925, + "loss": 2.5997, + "step": 1595 + }, + { + "epoch": 1.0303421562298256, + "grad_norm": 1.1642583046599024, + "learning_rate": 0.0002394, + "loss": 
2.3826, + "step": 1596 + }, + { + "epoch": 1.0309877340219495, + "grad_norm": 1.1395629997156693, + "learning_rate": 0.00023954999999999997, + "loss": 1.8975, + "step": 1597 + }, + { + "epoch": 1.0316333118140737, + "grad_norm": 1.2690771801577236, + "learning_rate": 0.0002397, + "loss": 2.4569, + "step": 1598 + }, + { + "epoch": 1.0322788896061976, + "grad_norm": 1.3462720473954914, + "learning_rate": 0.00023984999999999998, + "loss": 2.1892, + "step": 1599 + }, + { + "epoch": 1.0329244673983216, + "grad_norm": 1.2092712184125123, + "learning_rate": 0.00023999999999999998, + "loss": 2.5595, + "step": 1600 + }, + { + "epoch": 1.0335700451904455, + "grad_norm": 1.1887258388907243, + "learning_rate": 0.00024014999999999998, + "loss": 1.9455, + "step": 1601 + }, + { + "epoch": 1.0342156229825694, + "grad_norm": 1.2882581242988846, + "learning_rate": 0.00024029999999999999, + "loss": 2.2855, + "step": 1602 + }, + { + "epoch": 1.0348612007746933, + "grad_norm": 1.1199047356623877, + "learning_rate": 0.00024044999999999996, + "loss": 2.1144, + "step": 1603 + }, + { + "epoch": 1.0355067785668173, + "grad_norm": 1.2384733994290118, + "learning_rate": 0.0002406, + "loss": 2.4715, + "step": 1604 + }, + { + "epoch": 1.0361523563589412, + "grad_norm": 1.293838092747275, + "learning_rate": 0.00024074999999999997, + "loss": 2.4752, + "step": 1605 + }, + { + "epoch": 1.0367979341510651, + "grad_norm": 1.3159746523862224, + "learning_rate": 0.0002409, + "loss": 2.1218, + "step": 1606 + }, + { + "epoch": 1.037443511943189, + "grad_norm": 1.3000390621471796, + "learning_rate": 0.00024104999999999998, + "loss": 2.4338, + "step": 1607 + }, + { + "epoch": 1.0380890897353132, + "grad_norm": 1.3544924339041757, + "learning_rate": 0.00024119999999999998, + "loss": 2.5641, + "step": 1608 + }, + { + "epoch": 1.0387346675274371, + "grad_norm": 1.2417821239345128, + "learning_rate": 0.00024134999999999998, + "loss": 2.446, + "step": 1609 + }, + { + "epoch": 1.039380245319561, + "grad_norm": 1.2166341627612631, + "learning_rate": 0.0002415, + "loss": 2.1952, + "step": 1610 + }, + { + "epoch": 1.040025823111685, + "grad_norm": 1.3737198919261175, + "learning_rate": 0.00024164999999999996, + "loss": 2.4927, + "step": 1611 + }, + { + "epoch": 1.040671400903809, + "grad_norm": 1.1569824699014513, + "learning_rate": 0.0002418, + "loss": 2.1434, + "step": 1612 + }, + { + "epoch": 1.0413169786959329, + "grad_norm": 1.2859927503132051, + "learning_rate": 0.00024194999999999997, + "loss": 2.4616, + "step": 1613 + }, + { + "epoch": 1.0419625564880568, + "grad_norm": 1.3862071410828392, + "learning_rate": 0.0002421, + "loss": 1.9603, + "step": 1614 + }, + { + "epoch": 1.0426081342801807, + "grad_norm": 1.1763453294470119, + "learning_rate": 0.00024224999999999998, + "loss": 2.0927, + "step": 1615 + }, + { + "epoch": 1.0432537120723047, + "grad_norm": 1.2864142291453073, + "learning_rate": 0.00024239999999999998, + "loss": 2.4188, + "step": 1616 + }, + { + "epoch": 1.0438992898644286, + "grad_norm": 1.4352793781115047, + "learning_rate": 0.00024255, + "loss": 2.6533, + "step": 1617 + }, + { + "epoch": 1.0445448676565525, + "grad_norm": 1.242150186167682, + "learning_rate": 0.0002427, + "loss": 2.2469, + "step": 1618 + }, + { + "epoch": 1.0451904454486765, + "grad_norm": 1.2220814044923962, + "learning_rate": 0.00024284999999999997, + "loss": 2.1949, + "step": 1619 + }, + { + "epoch": 1.0458360232408006, + "grad_norm": 1.2863464662830697, + "learning_rate": 0.000243, + "loss": 2.213, + "step": 1620 + }, + { + "epoch": 
1.0464816010329245, + "grad_norm": 1.231125397958329, + "learning_rate": 0.00024314999999999997, + "loss": 1.9646, + "step": 1621 + }, + { + "epoch": 1.0471271788250485, + "grad_norm": 1.2054935919481764, + "learning_rate": 0.0002433, + "loss": 2.2772, + "step": 1622 + }, + { + "epoch": 1.0477727566171724, + "grad_norm": 1.17611391284568, + "learning_rate": 0.00024344999999999998, + "loss": 2.1781, + "step": 1623 + }, + { + "epoch": 1.0484183344092963, + "grad_norm": 1.2703621880914997, + "learning_rate": 0.00024359999999999999, + "loss": 2.3797, + "step": 1624 + }, + { + "epoch": 1.0490639122014203, + "grad_norm": 1.8816791127086014, + "learning_rate": 0.00024375, + "loss": 1.9979, + "step": 1625 + }, + { + "epoch": 1.0497094899935442, + "grad_norm": 1.3280409496183558, + "learning_rate": 0.00024389999999999997, + "loss": 2.4455, + "step": 1626 + }, + { + "epoch": 1.0503550677856681, + "grad_norm": 1.3227700394582182, + "learning_rate": 0.00024404999999999997, + "loss": 2.7238, + "step": 1627 + }, + { + "epoch": 1.051000645577792, + "grad_norm": 1.0815419177085372, + "learning_rate": 0.00024419999999999997, + "loss": 1.916, + "step": 1628 + }, + { + "epoch": 1.051646223369916, + "grad_norm": 1.2185808391961608, + "learning_rate": 0.00024435, + "loss": 2.5363, + "step": 1629 + }, + { + "epoch": 1.0522918011620401, + "grad_norm": 1.1195481837308134, + "learning_rate": 0.0002445, + "loss": 2.5001, + "step": 1630 + }, + { + "epoch": 1.052937378954164, + "grad_norm": 1.1340419932161754, + "learning_rate": 0.00024464999999999996, + "loss": 2.4053, + "step": 1631 + }, + { + "epoch": 1.053582956746288, + "grad_norm": 1.1731713315564118, + "learning_rate": 0.0002448, + "loss": 2.4608, + "step": 1632 + }, + { + "epoch": 1.054228534538412, + "grad_norm": 1.231852123648813, + "learning_rate": 0.00024494999999999996, + "loss": 2.4688, + "step": 1633 + }, + { + "epoch": 1.0548741123305359, + "grad_norm": 1.1866337979134656, + "learning_rate": 0.00024509999999999994, + "loss": 1.9846, + "step": 1634 + }, + { + "epoch": 1.0555196901226598, + "grad_norm": 1.3096262979569375, + "learning_rate": 0.00024524999999999997, + "loss": 2.4191, + "step": 1635 + }, + { + "epoch": 1.0561652679147837, + "grad_norm": 1.1835317302199486, + "learning_rate": 0.00024539999999999995, + "loss": 2.3731, + "step": 1636 + }, + { + "epoch": 1.0568108457069076, + "grad_norm": 1.2316501858215132, + "learning_rate": 0.00024555, + "loss": 2.5367, + "step": 1637 + }, + { + "epoch": 1.0574564234990316, + "grad_norm": 1.2130085139792195, + "learning_rate": 0.00024569999999999995, + "loss": 2.5777, + "step": 1638 + }, + { + "epoch": 1.0581020012911555, + "grad_norm": 1.2086678497219225, + "learning_rate": 0.00024585, + "loss": 2.455, + "step": 1639 + }, + { + "epoch": 1.0587475790832794, + "grad_norm": 1.1811956363096132, + "learning_rate": 0.00024599999999999996, + "loss": 2.5725, + "step": 1640 + }, + { + "epoch": 1.0593931568754036, + "grad_norm": 1.2557846969999422, + "learning_rate": 0.00024615, + "loss": 2.4428, + "step": 1641 + }, + { + "epoch": 1.0600387346675275, + "grad_norm": 1.2032907074758314, + "learning_rate": 0.00024629999999999997, + "loss": 2.3729, + "step": 1642 + }, + { + "epoch": 1.0606843124596514, + "grad_norm": 1.2008643641042596, + "learning_rate": 0.00024645, + "loss": 2.4777, + "step": 1643 + }, + { + "epoch": 1.0613298902517754, + "grad_norm": 1.2359783648682903, + "learning_rate": 0.0002466, + "loss": 2.1229, + "step": 1644 + }, + { + "epoch": 1.0619754680438993, + "grad_norm": 1.2336081929909863, + 
"learning_rate": 0.00024675, + "loss": 2.3013, + "step": 1645 + }, + { + "epoch": 1.0626210458360232, + "grad_norm": 1.2244846605794624, + "learning_rate": 0.0002469, + "loss": 2.3265, + "step": 1646 + }, + { + "epoch": 1.0632666236281472, + "grad_norm": 1.321022992625959, + "learning_rate": 0.00024704999999999996, + "loss": 2.3629, + "step": 1647 + }, + { + "epoch": 1.063912201420271, + "grad_norm": 1.37166729715712, + "learning_rate": 0.0002472, + "loss": 2.3088, + "step": 1648 + }, + { + "epoch": 1.064557779212395, + "grad_norm": 1.308949751795605, + "learning_rate": 0.00024734999999999997, + "loss": 2.1505, + "step": 1649 + }, + { + "epoch": 1.065203357004519, + "grad_norm": 1.276049972327084, + "learning_rate": 0.00024749999999999994, + "loss": 2.4359, + "step": 1650 + }, + { + "epoch": 1.065848934796643, + "grad_norm": 1.2637982063547708, + "learning_rate": 0.00024765, + "loss": 2.1892, + "step": 1651 + }, + { + "epoch": 1.066494512588767, + "grad_norm": 1.2549006033375831, + "learning_rate": 0.00024779999999999995, + "loss": 2.2189, + "step": 1652 + }, + { + "epoch": 1.067140090380891, + "grad_norm": 1.204696539515319, + "learning_rate": 0.00024795, + "loss": 2.4818, + "step": 1653 + }, + { + "epoch": 1.067785668173015, + "grad_norm": 1.2787938671936268, + "learning_rate": 0.00024809999999999996, + "loss": 2.3673, + "step": 1654 + }, + { + "epoch": 1.0684312459651388, + "grad_norm": 1.2234894130607936, + "learning_rate": 0.00024825, + "loss": 2.3878, + "step": 1655 + }, + { + "epoch": 1.0690768237572628, + "grad_norm": 1.312045315441348, + "learning_rate": 0.00024839999999999997, + "loss": 2.3247, + "step": 1656 + }, + { + "epoch": 1.0697224015493867, + "grad_norm": 1.3165766968119827, + "learning_rate": 0.00024855, + "loss": 2.7656, + "step": 1657 + }, + { + "epoch": 1.0703679793415106, + "grad_norm": 1.2160955753921012, + "learning_rate": 0.0002487, + "loss": 2.5275, + "step": 1658 + }, + { + "epoch": 1.0710135571336346, + "grad_norm": 1.1496525355212803, + "learning_rate": 0.00024885, + "loss": 2.2351, + "step": 1659 + }, + { + "epoch": 1.0716591349257585, + "grad_norm": 1.2097976084721678, + "learning_rate": 0.000249, + "loss": 2.3726, + "step": 1660 + }, + { + "epoch": 1.0723047127178824, + "grad_norm": 1.1835940400641218, + "learning_rate": 0.00024914999999999996, + "loss": 2.1853, + "step": 1661 + }, + { + "epoch": 1.0729502905100063, + "grad_norm": 1.2096643983791195, + "learning_rate": 0.0002493, + "loss": 2.0582, + "step": 1662 + }, + { + "epoch": 1.0735958683021305, + "grad_norm": 1.2861903219930644, + "learning_rate": 0.00024944999999999996, + "loss": 2.5754, + "step": 1663 + }, + { + "epoch": 1.0742414460942544, + "grad_norm": 1.2388100092190781, + "learning_rate": 0.00024959999999999994, + "loss": 2.4855, + "step": 1664 + }, + { + "epoch": 1.0748870238863784, + "grad_norm": 1.369540314107652, + "learning_rate": 0.00024974999999999997, + "loss": 2.053, + "step": 1665 + }, + { + "epoch": 1.0755326016785023, + "grad_norm": 1.5291192176233894, + "learning_rate": 0.00024989999999999995, + "loss": 2.1532, + "step": 1666 + }, + { + "epoch": 1.0761781794706262, + "grad_norm": 1.0760489944797755, + "learning_rate": 0.00025005, + "loss": 2.0098, + "step": 1667 + }, + { + "epoch": 1.0768237572627501, + "grad_norm": 1.4813146013954546, + "learning_rate": 0.00025019999999999996, + "loss": 2.5657, + "step": 1668 + }, + { + "epoch": 1.077469335054874, + "grad_norm": 1.3107399132286863, + "learning_rate": 0.00025035, + "loss": 2.3769, + "step": 1669 + }, + { + "epoch": 
1.078114912846998, + "grad_norm": 1.3884413978825108, + "learning_rate": 0.00025049999999999996, + "loss": 2.382, + "step": 1670 + }, + { + "epoch": 1.078760490639122, + "grad_norm": 1.3760730132656864, + "learning_rate": 0.00025065, + "loss": 2.336, + "step": 1671 + }, + { + "epoch": 1.0794060684312459, + "grad_norm": 1.5160850752074058, + "learning_rate": 0.00025079999999999997, + "loss": 2.3711, + "step": 1672 + }, + { + "epoch": 1.08005164622337, + "grad_norm": 1.3235894958163497, + "learning_rate": 0.00025095, + "loss": 2.2561, + "step": 1673 + }, + { + "epoch": 1.080697224015494, + "grad_norm": 1.4696013723159755, + "learning_rate": 0.0002511, + "loss": 2.7438, + "step": 1674 + }, + { + "epoch": 1.0813428018076179, + "grad_norm": 1.3396904404791188, + "learning_rate": 0.00025125, + "loss": 2.6212, + "step": 1675 + }, + { + "epoch": 1.0819883795997418, + "grad_norm": 1.1917297900862065, + "learning_rate": 0.0002514, + "loss": 2.274, + "step": 1676 + }, + { + "epoch": 1.0826339573918657, + "grad_norm": 1.2186027549478406, + "learning_rate": 0.00025154999999999996, + "loss": 2.3228, + "step": 1677 + }, + { + "epoch": 1.0832795351839897, + "grad_norm": 1.359124722061572, + "learning_rate": 0.0002517, + "loss": 2.4039, + "step": 1678 + }, + { + "epoch": 1.0839251129761136, + "grad_norm": 1.1761758500195878, + "learning_rate": 0.00025184999999999997, + "loss": 2.3107, + "step": 1679 + }, + { + "epoch": 1.0845706907682375, + "grad_norm": 1.1877626194392454, + "learning_rate": 0.00025199999999999995, + "loss": 2.3645, + "step": 1680 + }, + { + "epoch": 1.0852162685603615, + "grad_norm": 1.2429462654620553, + "learning_rate": 0.00025215, + "loss": 2.3149, + "step": 1681 + }, + { + "epoch": 1.0858618463524854, + "grad_norm": 1.2799372170133618, + "learning_rate": 0.00025229999999999995, + "loss": 2.4639, + "step": 1682 + }, + { + "epoch": 1.0865074241446093, + "grad_norm": 1.2401568945352988, + "learning_rate": 0.00025245, + "loss": 2.0539, + "step": 1683 + }, + { + "epoch": 1.0871530019367335, + "grad_norm": 1.2759652950971376, + "learning_rate": 0.00025259999999999996, + "loss": 2.1707, + "step": 1684 + }, + { + "epoch": 1.0877985797288574, + "grad_norm": 0.9781918728364397, + "learning_rate": 0.00025275, + "loss": 1.822, + "step": 1685 + }, + { + "epoch": 1.0884441575209813, + "grad_norm": 1.2554949049644004, + "learning_rate": 0.00025289999999999997, + "loss": 2.469, + "step": 1686 + }, + { + "epoch": 1.0890897353131053, + "grad_norm": 1.187210075668738, + "learning_rate": 0.00025305, + "loss": 1.9303, + "step": 1687 + }, + { + "epoch": 1.0897353131052292, + "grad_norm": 1.2611012510840036, + "learning_rate": 0.0002532, + "loss": 2.4508, + "step": 1688 + }, + { + "epoch": 1.0903808908973531, + "grad_norm": 1.2662142842508062, + "learning_rate": 0.00025335, + "loss": 2.3263, + "step": 1689 + }, + { + "epoch": 1.091026468689477, + "grad_norm": 1.1800056631139053, + "learning_rate": 0.0002535, + "loss": 2.3279, + "step": 1690 + }, + { + "epoch": 1.091672046481601, + "grad_norm": 1.3382161559413372, + "learning_rate": 0.00025365, + "loss": 2.6963, + "step": 1691 + }, + { + "epoch": 1.092317624273725, + "grad_norm": 1.3039916490317578, + "learning_rate": 0.0002538, + "loss": 2.4177, + "step": 1692 + }, + { + "epoch": 1.0929632020658488, + "grad_norm": 1.247680162657801, + "learning_rate": 0.00025394999999999997, + "loss": 2.3498, + "step": 1693 + }, + { + "epoch": 1.093608779857973, + "grad_norm": 1.117786757672093, + "learning_rate": 0.0002541, + "loss": 2.3865, + "step": 1694 + }, + { + 
"epoch": 1.094254357650097, + "grad_norm": 1.2673825676393833, + "learning_rate": 0.00025425, + "loss": 2.3254, + "step": 1695 + }, + { + "epoch": 1.0948999354422209, + "grad_norm": 1.3667455143506682, + "learning_rate": 0.00025439999999999995, + "loss": 2.4675, + "step": 1696 + }, + { + "epoch": 1.0955455132343448, + "grad_norm": 1.1883561958806796, + "learning_rate": 0.00025455, + "loss": 2.3487, + "step": 1697 + }, + { + "epoch": 1.0961910910264687, + "grad_norm": 1.3925565655846754, + "learning_rate": 0.00025469999999999996, + "loss": 2.3797, + "step": 1698 + }, + { + "epoch": 1.0968366688185927, + "grad_norm": 1.327315046945895, + "learning_rate": 0.00025485, + "loss": 2.4467, + "step": 1699 + }, + { + "epoch": 1.0974822466107166, + "grad_norm": 1.246715615555814, + "learning_rate": 0.00025499999999999996, + "loss": 2.5106, + "step": 1700 + }, + { + "epoch": 1.0981278244028405, + "grad_norm": 1.366172751270056, + "learning_rate": 0.00025515, + "loss": 2.2507, + "step": 1701 + }, + { + "epoch": 1.0987734021949644, + "grad_norm": 1.582799928516037, + "learning_rate": 0.00025529999999999997, + "loss": 2.836, + "step": 1702 + }, + { + "epoch": 1.0994189799870884, + "grad_norm": 1.2707806205229357, + "learning_rate": 0.00025545, + "loss": 2.6931, + "step": 1703 + }, + { + "epoch": 1.1000645577792123, + "grad_norm": 1.1548186474810518, + "learning_rate": 0.0002556, + "loss": 2.6799, + "step": 1704 + }, + { + "epoch": 1.1007101355713362, + "grad_norm": 1.1762509605462605, + "learning_rate": 0.00025575, + "loss": 2.572, + "step": 1705 + }, + { + "epoch": 1.1013557133634604, + "grad_norm": 1.1316891744789959, + "learning_rate": 0.0002559, + "loss": 2.401, + "step": 1706 + }, + { + "epoch": 1.1020012911555843, + "grad_norm": 1.2143756045567726, + "learning_rate": 0.00025604999999999996, + "loss": 2.2692, + "step": 1707 + }, + { + "epoch": 1.1026468689477082, + "grad_norm": 1.1955464024464706, + "learning_rate": 0.0002562, + "loss": 2.1389, + "step": 1708 + }, + { + "epoch": 1.1032924467398322, + "grad_norm": 1.1961624243623912, + "learning_rate": 0.00025634999999999997, + "loss": 2.2826, + "step": 1709 + }, + { + "epoch": 1.103938024531956, + "grad_norm": 1.3431536021814368, + "learning_rate": 0.00025649999999999995, + "loss": 2.2905, + "step": 1710 + }, + { + "epoch": 1.10458360232408, + "grad_norm": 1.2301188664829728, + "learning_rate": 0.00025665, + "loss": 2.6582, + "step": 1711 + }, + { + "epoch": 1.105229180116204, + "grad_norm": 1.3089172114039807, + "learning_rate": 0.00025679999999999995, + "loss": 2.657, + "step": 1712 + }, + { + "epoch": 1.105874757908328, + "grad_norm": 1.1897922174958389, + "learning_rate": 0.00025695, + "loss": 2.4183, + "step": 1713 + }, + { + "epoch": 1.1065203357004518, + "grad_norm": 1.3534110455314168, + "learning_rate": 0.00025709999999999996, + "loss": 2.4987, + "step": 1714 + }, + { + "epoch": 1.1071659134925758, + "grad_norm": 1.1467232266351441, + "learning_rate": 0.00025725, + "loss": 2.3863, + "step": 1715 + }, + { + "epoch": 1.1078114912847, + "grad_norm": 1.1982090221967163, + "learning_rate": 0.00025739999999999997, + "loss": 2.4035, + "step": 1716 + }, + { + "epoch": 1.1084570690768238, + "grad_norm": 1.253502734421164, + "learning_rate": 0.00025755, + "loss": 2.5205, + "step": 1717 + }, + { + "epoch": 1.1091026468689478, + "grad_norm": 1.3344230647877273, + "learning_rate": 0.0002577, + "loss": 2.5876, + "step": 1718 + }, + { + "epoch": 1.1097482246610717, + "grad_norm": 1.1246903678983222, + "learning_rate": 0.00025785, + "loss": 2.1344, + 
"step": 1719 + }, + { + "epoch": 1.1103938024531956, + "grad_norm": 1.265024636668877, + "learning_rate": 0.000258, + "loss": 2.0486, + "step": 1720 + }, + { + "epoch": 1.1110393802453196, + "grad_norm": 1.1762960027535638, + "learning_rate": 0.00025815, + "loss": 2.189, + "step": 1721 + }, + { + "epoch": 1.1116849580374435, + "grad_norm": 1.3554237011444246, + "learning_rate": 0.0002583, + "loss": 2.4202, + "step": 1722 + }, + { + "epoch": 1.1123305358295674, + "grad_norm": 1.220303446904081, + "learning_rate": 0.00025844999999999997, + "loss": 1.9198, + "step": 1723 + }, + { + "epoch": 1.1129761136216914, + "grad_norm": 1.1748689669818082, + "learning_rate": 0.0002586, + "loss": 2.4632, + "step": 1724 + }, + { + "epoch": 1.1136216914138153, + "grad_norm": 1.3166772303010883, + "learning_rate": 0.00025875, + "loss": 2.4488, + "step": 1725 + }, + { + "epoch": 1.1142672692059392, + "grad_norm": 1.0701455379263356, + "learning_rate": 0.00025889999999999995, + "loss": 2.0266, + "step": 1726 + }, + { + "epoch": 1.1149128469980634, + "grad_norm": 1.1716894556960302, + "learning_rate": 0.00025905, + "loss": 2.4642, + "step": 1727 + }, + { + "epoch": 1.1155584247901873, + "grad_norm": 1.169227100186923, + "learning_rate": 0.00025919999999999996, + "loss": 2.3234, + "step": 1728 + }, + { + "epoch": 1.1162040025823112, + "grad_norm": 1.2666890092958878, + "learning_rate": 0.00025935, + "loss": 2.486, + "step": 1729 + }, + { + "epoch": 1.1168495803744352, + "grad_norm": 1.1342683665031024, + "learning_rate": 0.00025949999999999997, + "loss": 2.3977, + "step": 1730 + }, + { + "epoch": 1.117495158166559, + "grad_norm": 1.2431260854460404, + "learning_rate": 0.00025965, + "loss": 2.4349, + "step": 1731 + }, + { + "epoch": 1.118140735958683, + "grad_norm": 1.231048817697315, + "learning_rate": 0.00025979999999999997, + "loss": 2.6616, + "step": 1732 + }, + { + "epoch": 1.118786313750807, + "grad_norm": 1.242252934154783, + "learning_rate": 0.00025995, + "loss": 2.3928, + "step": 1733 + }, + { + "epoch": 1.1194318915429309, + "grad_norm": 1.1953396829172878, + "learning_rate": 0.0002601, + "loss": 2.3597, + "step": 1734 + }, + { + "epoch": 1.1200774693350548, + "grad_norm": 1.2879088368178375, + "learning_rate": 0.00026025, + "loss": 2.4942, + "step": 1735 + }, + { + "epoch": 1.1207230471271787, + "grad_norm": 1.0663821483085654, + "learning_rate": 0.0002604, + "loss": 2.3729, + "step": 1736 + }, + { + "epoch": 1.121368624919303, + "grad_norm": 1.1861310697990717, + "learning_rate": 0.00026055, + "loss": 2.541, + "step": 1737 + }, + { + "epoch": 1.1220142027114268, + "grad_norm": 1.179808566647685, + "learning_rate": 0.0002607, + "loss": 2.041, + "step": 1738 + }, + { + "epoch": 1.1226597805035508, + "grad_norm": 1.1324704729313988, + "learning_rate": 0.00026084999999999997, + "loss": 2.3999, + "step": 1739 + }, + { + "epoch": 1.1233053582956747, + "grad_norm": 1.1679007532954764, + "learning_rate": 0.000261, + "loss": 2.4394, + "step": 1740 + }, + { + "epoch": 1.1239509360877986, + "grad_norm": 1.1361561287434212, + "learning_rate": 0.00026115, + "loss": 2.4363, + "step": 1741 + }, + { + "epoch": 1.1245965138799225, + "grad_norm": 1.213686118090655, + "learning_rate": 0.00026129999999999995, + "loss": 2.328, + "step": 1742 + }, + { + "epoch": 1.1252420916720465, + "grad_norm": 1.34296269800688, + "learning_rate": 0.00026145, + "loss": 2.3238, + "step": 1743 + }, + { + "epoch": 1.1258876694641704, + "grad_norm": 1.14702707725686, + "learning_rate": 0.00026159999999999996, + "loss": 2.4486, + "step": 1744 
+ }, + { + "epoch": 1.1265332472562943, + "grad_norm": 1.2453303225362509, + "learning_rate": 0.00026175, + "loss": 2.2708, + "step": 1745 + }, + { + "epoch": 1.1271788250484183, + "grad_norm": 1.0955094176122553, + "learning_rate": 0.00026189999999999997, + "loss": 2.3158, + "step": 1746 + }, + { + "epoch": 1.1278244028405422, + "grad_norm": 1.123971121079445, + "learning_rate": 0.00026205, + "loss": 2.3736, + "step": 1747 + }, + { + "epoch": 1.1284699806326661, + "grad_norm": 1.181700185749163, + "learning_rate": 0.0002622, + "loss": 2.382, + "step": 1748 + }, + { + "epoch": 1.1291155584247903, + "grad_norm": 1.1789439014919356, + "learning_rate": 0.00026235, + "loss": 2.0577, + "step": 1749 + }, + { + "epoch": 1.1297611362169142, + "grad_norm": 1.2413402012102608, + "learning_rate": 0.0002625, + "loss": 2.3268, + "step": 1750 + }, + { + "epoch": 1.1304067140090381, + "grad_norm": 1.31938316327162, + "learning_rate": 0.00026264999999999996, + "loss": 2.6516, + "step": 1751 + }, + { + "epoch": 1.131052291801162, + "grad_norm": 1.2328553636241264, + "learning_rate": 0.0002628, + "loss": 2.331, + "step": 1752 + }, + { + "epoch": 1.131697869593286, + "grad_norm": 1.2106265855136567, + "learning_rate": 0.00026294999999999997, + "loss": 2.0696, + "step": 1753 + }, + { + "epoch": 1.13234344738541, + "grad_norm": 1.1291855843690237, + "learning_rate": 0.0002631, + "loss": 1.8569, + "step": 1754 + }, + { + "epoch": 1.1329890251775339, + "grad_norm": 1.3419794494437436, + "learning_rate": 0.00026325, + "loss": 2.3003, + "step": 1755 + }, + { + "epoch": 1.1336346029696578, + "grad_norm": 1.2488402654162905, + "learning_rate": 0.00026339999999999995, + "loss": 2.6121, + "step": 1756 + }, + { + "epoch": 1.1342801807617817, + "grad_norm": 1.300380156712599, + "learning_rate": 0.00026355, + "loss": 2.1899, + "step": 1757 + }, + { + "epoch": 1.1349257585539057, + "grad_norm": 1.1739144518000721, + "learning_rate": 0.00026369999999999996, + "loss": 2.1837, + "step": 1758 + }, + { + "epoch": 1.1355713363460298, + "grad_norm": 1.4743236084459794, + "learning_rate": 0.00026384999999999994, + "loss": 2.5134, + "step": 1759 + }, + { + "epoch": 1.1362169141381537, + "grad_norm": 1.063013141860041, + "learning_rate": 0.00026399999999999997, + "loss": 2.2042, + "step": 1760 + }, + { + "epoch": 1.1368624919302777, + "grad_norm": 1.2045788409686422, + "learning_rate": 0.00026414999999999994, + "loss": 2.5484, + "step": 1761 + }, + { + "epoch": 1.1375080697224016, + "grad_norm": 1.3326377914881447, + "learning_rate": 0.0002643, + "loss": 2.4103, + "step": 1762 + }, + { + "epoch": 1.1381536475145255, + "grad_norm": 1.1622106362331481, + "learning_rate": 0.00026444999999999995, + "loss": 2.0896, + "step": 1763 + }, + { + "epoch": 1.1387992253066495, + "grad_norm": 1.2349847238779457, + "learning_rate": 0.0002646, + "loss": 2.7093, + "step": 1764 + }, + { + "epoch": 1.1394448030987734, + "grad_norm": 1.2845741604158054, + "learning_rate": 0.00026474999999999996, + "loss": 2.313, + "step": 1765 + }, + { + "epoch": 1.1400903808908973, + "grad_norm": 1.2877453554799878, + "learning_rate": 0.0002649, + "loss": 2.4293, + "step": 1766 + }, + { + "epoch": 1.1407359586830212, + "grad_norm": 1.3043614526161187, + "learning_rate": 0.00026504999999999996, + "loss": 2.5226, + "step": 1767 + }, + { + "epoch": 1.1413815364751452, + "grad_norm": 1.203028750491462, + "learning_rate": 0.0002652, + "loss": 2.2899, + "step": 1768 + }, + { + "epoch": 1.142027114267269, + "grad_norm": 1.2153547445121928, + "learning_rate": 
0.00026534999999999997, + "loss": 2.639, + "step": 1769 + }, + { + "epoch": 1.142672692059393, + "grad_norm": 1.156682467175619, + "learning_rate": 0.0002655, + "loss": 2.5197, + "step": 1770 + }, + { + "epoch": 1.1433182698515172, + "grad_norm": 1.1822770659162491, + "learning_rate": 0.00026565, + "loss": 2.2942, + "step": 1771 + }, + { + "epoch": 1.1439638476436411, + "grad_norm": 1.597911589234956, + "learning_rate": 0.00026579999999999996, + "loss": 2.5765, + "step": 1772 + }, + { + "epoch": 1.144609425435765, + "grad_norm": 1.2560399167374876, + "learning_rate": 0.00026595, + "loss": 2.6107, + "step": 1773 + }, + { + "epoch": 1.145255003227889, + "grad_norm": 1.194095120493676, + "learning_rate": 0.00026609999999999996, + "loss": 2.4006, + "step": 1774 + }, + { + "epoch": 1.145900581020013, + "grad_norm": 1.382660063328166, + "learning_rate": 0.00026624999999999994, + "loss": 2.4616, + "step": 1775 + }, + { + "epoch": 1.1465461588121368, + "grad_norm": 1.1238294867460021, + "learning_rate": 0.00026639999999999997, + "loss": 2.4761, + "step": 1776 + }, + { + "epoch": 1.1471917366042608, + "grad_norm": 1.0586201136633804, + "learning_rate": 0.00026654999999999995, + "loss": 2.2442, + "step": 1777 + }, + { + "epoch": 1.1478373143963847, + "grad_norm": 1.190792356932149, + "learning_rate": 0.0002667, + "loss": 2.426, + "step": 1778 + }, + { + "epoch": 1.1484828921885086, + "grad_norm": 1.2234826019693468, + "learning_rate": 0.00026684999999999995, + "loss": 2.5707, + "step": 1779 + }, + { + "epoch": 1.1491284699806328, + "grad_norm": 1.1883826094923065, + "learning_rate": 0.000267, + "loss": 2.4828, + "step": 1780 + }, + { + "epoch": 1.1497740477727567, + "grad_norm": 1.1948337160323488, + "learning_rate": 0.00026714999999999996, + "loss": 2.5111, + "step": 1781 + }, + { + "epoch": 1.1504196255648806, + "grad_norm": 1.1006069417896436, + "learning_rate": 0.0002673, + "loss": 2.1883, + "step": 1782 + }, + { + "epoch": 1.1510652033570046, + "grad_norm": 1.3652760590323305, + "learning_rate": 0.00026744999999999997, + "loss": 2.4061, + "step": 1783 + }, + { + "epoch": 1.1517107811491285, + "grad_norm": 1.1784334382478847, + "learning_rate": 0.0002676, + "loss": 2.2162, + "step": 1784 + }, + { + "epoch": 1.1523563589412524, + "grad_norm": 1.3755383165309922, + "learning_rate": 0.00026775, + "loss": 2.095, + "step": 1785 + }, + { + "epoch": 1.1530019367333764, + "grad_norm": 1.292078320014806, + "learning_rate": 0.0002679, + "loss": 2.5792, + "step": 1786 + }, + { + "epoch": 1.1536475145255003, + "grad_norm": 1.241340638868289, + "learning_rate": 0.00026805, + "loss": 2.7813, + "step": 1787 + }, + { + "epoch": 1.1542930923176242, + "grad_norm": 1.2548459133074508, + "learning_rate": 0.00026819999999999996, + "loss": 2.5016, + "step": 1788 + }, + { + "epoch": 1.1549386701097482, + "grad_norm": 1.2583068041032408, + "learning_rate": 0.00026835, + "loss": 2.0933, + "step": 1789 + }, + { + "epoch": 1.155584247901872, + "grad_norm": 1.124067118125323, + "learning_rate": 0.00026849999999999997, + "loss": 2.1081, + "step": 1790 + }, + { + "epoch": 1.156229825693996, + "grad_norm": 1.128599322974761, + "learning_rate": 0.00026864999999999994, + "loss": 2.4185, + "step": 1791 + }, + { + "epoch": 1.1568754034861202, + "grad_norm": 1.2086951286801573, + "learning_rate": 0.0002688, + "loss": 2.2678, + "step": 1792 + }, + { + "epoch": 1.157520981278244, + "grad_norm": 1.1416473050550873, + "learning_rate": 0.00026894999999999995, + "loss": 2.3177, + "step": 1793 + }, + { + "epoch": 1.158166559070368, + 
"grad_norm": 1.2917248833185426, + "learning_rate": 0.0002691, + "loss": 2.6463, + "step": 1794 + }, + { + "epoch": 1.158812136862492, + "grad_norm": 1.2035748229764205, + "learning_rate": 0.00026924999999999996, + "loss": 2.4364, + "step": 1795 + }, + { + "epoch": 1.159457714654616, + "grad_norm": 1.4023996780454302, + "learning_rate": 0.0002694, + "loss": 2.342, + "step": 1796 + }, + { + "epoch": 1.1601032924467398, + "grad_norm": 1.1929548309138398, + "learning_rate": 0.00026954999999999997, + "loss": 2.4698, + "step": 1797 + }, + { + "epoch": 1.1607488702388638, + "grad_norm": 1.200161319693109, + "learning_rate": 0.0002697, + "loss": 2.5014, + "step": 1798 + }, + { + "epoch": 1.1613944480309877, + "grad_norm": 1.3029662349950137, + "learning_rate": 0.00026984999999999997, + "loss": 2.3629, + "step": 1799 + }, + { + "epoch": 1.1620400258231116, + "grad_norm": 1.1780737944432755, + "learning_rate": 0.00027, + "loss": 2.4235, + "step": 1800 + }, + { + "epoch": 1.1626856036152355, + "grad_norm": 1.050547310773558, + "learning_rate": 0.00027015, + "loss": 2.0097, + "step": 1801 + }, + { + "epoch": 1.1633311814073597, + "grad_norm": 1.2383976905599363, + "learning_rate": 0.00027029999999999996, + "loss": 2.4778, + "step": 1802 + }, + { + "epoch": 1.1639767591994836, + "grad_norm": 1.2119683377573465, + "learning_rate": 0.00027045, + "loss": 2.083, + "step": 1803 + }, + { + "epoch": 1.1646223369916076, + "grad_norm": 1.1813298706425592, + "learning_rate": 0.00027059999999999996, + "loss": 2.567, + "step": 1804 + }, + { + "epoch": 1.1652679147837315, + "grad_norm": 1.3418708884387414, + "learning_rate": 0.00027074999999999994, + "loss": 2.7407, + "step": 1805 + }, + { + "epoch": 1.1659134925758554, + "grad_norm": 1.177099468165103, + "learning_rate": 0.00027089999999999997, + "loss": 2.3718, + "step": 1806 + }, + { + "epoch": 1.1665590703679793, + "grad_norm": 1.2430889672562186, + "learning_rate": 0.00027104999999999995, + "loss": 2.5778, + "step": 1807 + }, + { + "epoch": 1.1672046481601033, + "grad_norm": 1.2365867356499056, + "learning_rate": 0.0002712, + "loss": 2.3832, + "step": 1808 + }, + { + "epoch": 1.1678502259522272, + "grad_norm": 1.101492924047418, + "learning_rate": 0.00027134999999999995, + "loss": 2.2686, + "step": 1809 + }, + { + "epoch": 1.1684958037443511, + "grad_norm": 1.2436192540229496, + "learning_rate": 0.0002715, + "loss": 2.5148, + "step": 1810 + }, + { + "epoch": 1.169141381536475, + "grad_norm": 1.1961201662125744, + "learning_rate": 0.00027164999999999996, + "loss": 2.5706, + "step": 1811 + }, + { + "epoch": 1.169786959328599, + "grad_norm": 1.2609863544505528, + "learning_rate": 0.0002718, + "loss": 2.1866, + "step": 1812 + }, + { + "epoch": 1.170432537120723, + "grad_norm": 1.1372426979592598, + "learning_rate": 0.00027194999999999997, + "loss": 2.2081, + "step": 1813 + }, + { + "epoch": 1.171078114912847, + "grad_norm": 1.154660269295889, + "learning_rate": 0.0002721, + "loss": 2.2695, + "step": 1814 + }, + { + "epoch": 1.171723692704971, + "grad_norm": 1.2813560301451044, + "learning_rate": 0.00027225, + "loss": 2.4553, + "step": 1815 + }, + { + "epoch": 1.172369270497095, + "grad_norm": 1.161837528872945, + "learning_rate": 0.0002724, + "loss": 2.3718, + "step": 1816 + }, + { + "epoch": 1.1730148482892189, + "grad_norm": 1.182717065622611, + "learning_rate": 0.00027255, + "loss": 2.4525, + "step": 1817 + }, + { + "epoch": 1.1736604260813428, + "grad_norm": 1.2251559164325063, + "learning_rate": 0.00027269999999999996, + "loss": 2.0983, + "step": 1818 + }, + 
{ + "epoch": 1.1743060038734667, + "grad_norm": 1.2767682451162856, + "learning_rate": 0.00027285, + "loss": 1.9588, + "step": 1819 + }, + { + "epoch": 1.1749515816655907, + "grad_norm": 1.326362752702936, + "learning_rate": 0.00027299999999999997, + "loss": 2.5438, + "step": 1820 + }, + { + "epoch": 1.1755971594577146, + "grad_norm": 1.2032964118024712, + "learning_rate": 0.00027314999999999994, + "loss": 2.3531, + "step": 1821 + }, + { + "epoch": 1.1762427372498385, + "grad_norm": 1.239713706776897, + "learning_rate": 0.0002733, + "loss": 2.2989, + "step": 1822 + }, + { + "epoch": 1.1768883150419627, + "grad_norm": 1.1194418680507165, + "learning_rate": 0.00027344999999999995, + "loss": 2.3213, + "step": 1823 + }, + { + "epoch": 1.1775338928340866, + "grad_norm": 1.1026549238925092, + "learning_rate": 0.0002736, + "loss": 2.4707, + "step": 1824 + }, + { + "epoch": 1.1781794706262105, + "grad_norm": 1.1871707172302561, + "learning_rate": 0.00027374999999999996, + "loss": 2.6226, + "step": 1825 + }, + { + "epoch": 1.1788250484183345, + "grad_norm": 1.200847010048262, + "learning_rate": 0.0002739, + "loss": 2.5771, + "step": 1826 + }, + { + "epoch": 1.1794706262104584, + "grad_norm": 1.0972729313392577, + "learning_rate": 0.00027404999999999997, + "loss": 2.2552, + "step": 1827 + }, + { + "epoch": 1.1801162040025823, + "grad_norm": 1.162176913071764, + "learning_rate": 0.0002742, + "loss": 2.2549, + "step": 1828 + }, + { + "epoch": 1.1807617817947063, + "grad_norm": 1.177094841199423, + "learning_rate": 0.00027435, + "loss": 2.5731, + "step": 1829 + }, + { + "epoch": 1.1814073595868302, + "grad_norm": 1.2032956026251056, + "learning_rate": 0.0002745, + "loss": 2.4887, + "step": 1830 + }, + { + "epoch": 1.1820529373789541, + "grad_norm": 1.3098784007916502, + "learning_rate": 0.00027465, + "loss": 2.5283, + "step": 1831 + }, + { + "epoch": 1.182698515171078, + "grad_norm": 1.1284115095226308, + "learning_rate": 0.0002748, + "loss": 2.524, + "step": 1832 + }, + { + "epoch": 1.183344092963202, + "grad_norm": 1.1253565930376328, + "learning_rate": 0.00027495, + "loss": 2.3892, + "step": 1833 + }, + { + "epoch": 1.183989670755326, + "grad_norm": 1.0802044704411828, + "learning_rate": 0.00027509999999999996, + "loss": 2.3469, + "step": 1834 + }, + { + "epoch": 1.18463524854745, + "grad_norm": 1.3416764293004249, + "learning_rate": 0.00027525, + "loss": 2.3903, + "step": 1835 + }, + { + "epoch": 1.185280826339574, + "grad_norm": 1.254114854193434, + "learning_rate": 0.00027539999999999997, + "loss": 2.1793, + "step": 1836 + }, + { + "epoch": 1.185926404131698, + "grad_norm": 1.3368083070472918, + "learning_rate": 0.00027554999999999995, + "loss": 2.3088, + "step": 1837 + }, + { + "epoch": 1.1865719819238219, + "grad_norm": 1.1632562503571489, + "learning_rate": 0.0002757, + "loss": 2.3597, + "step": 1838 + }, + { + "epoch": 1.1872175597159458, + "grad_norm": 1.2783462495697135, + "learning_rate": 0.00027584999999999996, + "loss": 2.25, + "step": 1839 + }, + { + "epoch": 1.1878631375080697, + "grad_norm": 1.259296507388869, + "learning_rate": 0.000276, + "loss": 2.6232, + "step": 1840 + }, + { + "epoch": 1.1885087153001936, + "grad_norm": 1.2667447334540893, + "learning_rate": 0.00027614999999999996, + "loss": 2.4897, + "step": 1841 + }, + { + "epoch": 1.1891542930923176, + "grad_norm": 1.157264047511543, + "learning_rate": 0.0002763, + "loss": 2.2756, + "step": 1842 + }, + { + "epoch": 1.1897998708844415, + "grad_norm": 1.1880164608314792, + "learning_rate": 0.00027644999999999997, + "loss": 
2.2986, + "step": 1843 + }, + { + "epoch": 1.1904454486765654, + "grad_norm": 1.170941981561124, + "learning_rate": 0.0002766, + "loss": 2.2675, + "step": 1844 + }, + { + "epoch": 1.1910910264686896, + "grad_norm": 1.251604765337782, + "learning_rate": 0.00027675, + "loss": 2.5371, + "step": 1845 + }, + { + "epoch": 1.1917366042608135, + "grad_norm": 1.1858423532747884, + "learning_rate": 0.0002769, + "loss": 2.6753, + "step": 1846 + }, + { + "epoch": 1.1923821820529374, + "grad_norm": 1.094293992868728, + "learning_rate": 0.00027705, + "loss": 2.5596, + "step": 1847 + }, + { + "epoch": 1.1930277598450614, + "grad_norm": 1.0822522535263714, + "learning_rate": 0.0002772, + "loss": 2.4592, + "step": 1848 + }, + { + "epoch": 1.1936733376371853, + "grad_norm": 1.1602094839630592, + "learning_rate": 0.00027735, + "loss": 2.029, + "step": 1849 + }, + { + "epoch": 1.1943189154293092, + "grad_norm": 1.3184688625117478, + "learning_rate": 0.00027749999999999997, + "loss": 2.3013, + "step": 1850 + }, + { + "epoch": 1.1949644932214332, + "grad_norm": 1.159583675967937, + "learning_rate": 0.00027764999999999995, + "loss": 2.4176, + "step": 1851 + }, + { + "epoch": 1.195610071013557, + "grad_norm": 1.1837004124008828, + "learning_rate": 0.0002778, + "loss": 2.3676, + "step": 1852 + }, + { + "epoch": 1.196255648805681, + "grad_norm": 1.373504944372784, + "learning_rate": 0.00027794999999999995, + "loss": 2.7126, + "step": 1853 + }, + { + "epoch": 1.196901226597805, + "grad_norm": 1.1860881775142855, + "learning_rate": 0.0002781, + "loss": 2.2607, + "step": 1854 + }, + { + "epoch": 1.1975468043899289, + "grad_norm": 1.2287433124832163, + "learning_rate": 0.00027824999999999996, + "loss": 2.3984, + "step": 1855 + }, + { + "epoch": 1.1981923821820528, + "grad_norm": 1.233419804733879, + "learning_rate": 0.0002784, + "loss": 2.4769, + "step": 1856 + }, + { + "epoch": 1.198837959974177, + "grad_norm": 1.326954445964219, + "learning_rate": 0.00027854999999999997, + "loss": 2.5336, + "step": 1857 + }, + { + "epoch": 1.199483537766301, + "grad_norm": 1.118350198473579, + "learning_rate": 0.0002787, + "loss": 2.3368, + "step": 1858 + }, + { + "epoch": 1.2001291155584248, + "grad_norm": 1.2247483028869428, + "learning_rate": 0.00027885, + "loss": 2.4236, + "step": 1859 + }, + { + "epoch": 1.2007746933505488, + "grad_norm": 1.21103973890637, + "learning_rate": 0.000279, + "loss": 2.2952, + "step": 1860 + }, + { + "epoch": 1.2014202711426727, + "grad_norm": 1.1843179746206474, + "learning_rate": 0.00027915, + "loss": 2.3163, + "step": 1861 + }, + { + "epoch": 1.2020658489347966, + "grad_norm": 1.0970801301409536, + "learning_rate": 0.0002793, + "loss": 2.0932, + "step": 1862 + }, + { + "epoch": 1.2027114267269206, + "grad_norm": 1.174381953449968, + "learning_rate": 0.00027945, + "loss": 2.5674, + "step": 1863 + }, + { + "epoch": 1.2033570045190445, + "grad_norm": 1.2380251888204798, + "learning_rate": 0.00027959999999999997, + "loss": 2.6753, + "step": 1864 + }, + { + "epoch": 1.2040025823111684, + "grad_norm": 1.3113507412459562, + "learning_rate": 0.00027975, + "loss": 2.3776, + "step": 1865 + }, + { + "epoch": 1.2046481601032926, + "grad_norm": 1.1276403584278198, + "learning_rate": 0.0002799, + "loss": 2.2937, + "step": 1866 + }, + { + "epoch": 1.2052937378954165, + "grad_norm": 1.0853076489500622, + "learning_rate": 0.00028004999999999995, + "loss": 2.5106, + "step": 1867 + }, + { + "epoch": 1.2059393156875404, + "grad_norm": 1.3270103527124706, + "learning_rate": 0.0002802, + "loss": 2.3016, + "step": 1868 + 
}, + { + "epoch": 1.2065848934796644, + "grad_norm": 1.186509922910511, + "learning_rate": 0.00028034999999999996, + "loss": 2.5673, + "step": 1869 + }, + { + "epoch": 1.2072304712717883, + "grad_norm": 1.200200008235454, + "learning_rate": 0.0002805, + "loss": 2.6256, + "step": 1870 + }, + { + "epoch": 1.2078760490639122, + "grad_norm": 1.2514521363927178, + "learning_rate": 0.00028064999999999996, + "loss": 2.3999, + "step": 1871 + }, + { + "epoch": 1.2085216268560361, + "grad_norm": 1.276909214136805, + "learning_rate": 0.0002808, + "loss": 2.429, + "step": 1872 + }, + { + "epoch": 1.20916720464816, + "grad_norm": 1.1998194839552125, + "learning_rate": 0.00028094999999999997, + "loss": 2.448, + "step": 1873 + }, + { + "epoch": 1.209812782440284, + "grad_norm": 1.3040373349796657, + "learning_rate": 0.0002811, + "loss": 2.6881, + "step": 1874 + }, + { + "epoch": 1.210458360232408, + "grad_norm": 1.1580985427034367, + "learning_rate": 0.00028125, + "loss": 2.5469, + "step": 1875 + }, + { + "epoch": 1.2111039380245319, + "grad_norm": 1.2577111857161438, + "learning_rate": 0.00028139999999999996, + "loss": 2.4464, + "step": 1876 + }, + { + "epoch": 1.2117495158166558, + "grad_norm": 1.2233229032962503, + "learning_rate": 0.00028155, + "loss": 2.2266, + "step": 1877 + }, + { + "epoch": 1.21239509360878, + "grad_norm": 1.1489865339811247, + "learning_rate": 0.00028169999999999996, + "loss": 2.454, + "step": 1878 + }, + { + "epoch": 1.2130406714009039, + "grad_norm": 1.2292210146789952, + "learning_rate": 0.00028185, + "loss": 2.5099, + "step": 1879 + }, + { + "epoch": 1.2136862491930278, + "grad_norm": 1.2699466303003064, + "learning_rate": 0.00028199999999999997, + "loss": 2.3488, + "step": 1880 + }, + { + "epoch": 1.2143318269851517, + "grad_norm": 1.1240410106510756, + "learning_rate": 0.00028215, + "loss": 2.1731, + "step": 1881 + }, + { + "epoch": 1.2149774047772757, + "grad_norm": 1.2270025676931344, + "learning_rate": 0.0002823, + "loss": 2.4921, + "step": 1882 + }, + { + "epoch": 1.2156229825693996, + "grad_norm": 1.2116121898522167, + "learning_rate": 0.00028244999999999995, + "loss": 1.9815, + "step": 1883 + }, + { + "epoch": 1.2162685603615235, + "grad_norm": 1.179802667608452, + "learning_rate": 0.0002826, + "loss": 1.9811, + "step": 1884 + }, + { + "epoch": 1.2169141381536475, + "grad_norm": 1.1259455123376807, + "learning_rate": 0.00028274999999999996, + "loss": 1.9528, + "step": 1885 + }, + { + "epoch": 1.2175597159457714, + "grad_norm": 1.0435425454330631, + "learning_rate": 0.00028289999999999994, + "loss": 2.0445, + "step": 1886 + }, + { + "epoch": 1.2182052937378953, + "grad_norm": 1.205206821264797, + "learning_rate": 0.00028304999999999997, + "loss": 2.3581, + "step": 1887 + }, + { + "epoch": 1.2188508715300195, + "grad_norm": 1.178349142216774, + "learning_rate": 0.00028319999999999994, + "loss": 2.0231, + "step": 1888 + }, + { + "epoch": 1.2194964493221434, + "grad_norm": 1.2454363720367598, + "learning_rate": 0.00028335, + "loss": 2.457, + "step": 1889 + }, + { + "epoch": 1.2201420271142673, + "grad_norm": 1.3596889125328877, + "learning_rate": 0.00028349999999999995, + "loss": 2.463, + "step": 1890 + }, + { + "epoch": 1.2207876049063913, + "grad_norm": 1.213671417757076, + "learning_rate": 0.00028365, + "loss": 2.3169, + "step": 1891 + }, + { + "epoch": 1.2214331826985152, + "grad_norm": 1.2407447962266362, + "learning_rate": 0.00028379999999999996, + "loss": 2.7347, + "step": 1892 + }, + { + "epoch": 1.2220787604906391, + "grad_norm": 1.155476033544199, + 
"learning_rate": 0.00028395, + "loss": 2.3491, + "step": 1893 + }, + { + "epoch": 1.222724338282763, + "grad_norm": 1.0892990167928465, + "learning_rate": 0.00028409999999999997, + "loss": 1.946, + "step": 1894 + }, + { + "epoch": 1.223369916074887, + "grad_norm": 1.0827846128890133, + "learning_rate": 0.00028425, + "loss": 1.9953, + "step": 1895 + }, + { + "epoch": 1.224015493867011, + "grad_norm": 1.2572300060053538, + "learning_rate": 0.0002844, + "loss": 2.6741, + "step": 1896 + }, + { + "epoch": 1.2246610716591348, + "grad_norm": 1.137752778964786, + "learning_rate": 0.00028455, + "loss": 2.5307, + "step": 1897 + }, + { + "epoch": 1.2253066494512588, + "grad_norm": 1.1832336960070509, + "learning_rate": 0.0002847, + "loss": 2.5629, + "step": 1898 + }, + { + "epoch": 1.2259522272433827, + "grad_norm": 1.1193774338307434, + "learning_rate": 0.00028484999999999996, + "loss": 1.8989, + "step": 1899 + }, + { + "epoch": 1.2265978050355069, + "grad_norm": 1.1171414638682096, + "learning_rate": 0.000285, + "loss": 2.3426, + "step": 1900 + }, + { + "epoch": 1.2272433828276308, + "grad_norm": 1.1176197776905505, + "learning_rate": 0.00028514999999999997, + "loss": 2.2379, + "step": 1901 + }, + { + "epoch": 1.2278889606197547, + "grad_norm": 1.2932436232315017, + "learning_rate": 0.00028529999999999994, + "loss": 2.2891, + "step": 1902 + }, + { + "epoch": 1.2285345384118787, + "grad_norm": 1.2582163071204029, + "learning_rate": 0.00028544999999999997, + "loss": 2.5688, + "step": 1903 + }, + { + "epoch": 1.2291801162040026, + "grad_norm": 1.3049900539485124, + "learning_rate": 0.00028559999999999995, + "loss": 2.659, + "step": 1904 + }, + { + "epoch": 1.2298256939961265, + "grad_norm": 1.1553791549201298, + "learning_rate": 0.00028575, + "loss": 2.1747, + "step": 1905 + }, + { + "epoch": 1.2304712717882504, + "grad_norm": 1.2341923397295447, + "learning_rate": 0.00028589999999999996, + "loss": 2.4008, + "step": 1906 + }, + { + "epoch": 1.2311168495803744, + "grad_norm": 1.2890108804725857, + "learning_rate": 0.00028605, + "loss": 2.5982, + "step": 1907 + }, + { + "epoch": 1.2317624273724983, + "grad_norm": 1.0961630285242165, + "learning_rate": 0.00028619999999999996, + "loss": 2.266, + "step": 1908 + }, + { + "epoch": 1.2324080051646225, + "grad_norm": 1.0879486557063376, + "learning_rate": 0.00028635, + "loss": 2.0365, + "step": 1909 + }, + { + "epoch": 1.2330535829567464, + "grad_norm": 1.2635940237958967, + "learning_rate": 0.00028649999999999997, + "loss": 2.3183, + "step": 1910 + }, + { + "epoch": 1.2336991607488703, + "grad_norm": 1.2826569125130127, + "learning_rate": 0.00028665, + "loss": 2.4176, + "step": 1911 + }, + { + "epoch": 1.2343447385409942, + "grad_norm": 1.2399249109504848, + "learning_rate": 0.0002868, + "loss": 2.6061, + "step": 1912 + }, + { + "epoch": 1.2349903163331182, + "grad_norm": 1.1582252181095647, + "learning_rate": 0.00028694999999999995, + "loss": 2.3401, + "step": 1913 + }, + { + "epoch": 1.235635894125242, + "grad_norm": 1.2145296306222104, + "learning_rate": 0.0002871, + "loss": 2.3256, + "step": 1914 + }, + { + "epoch": 1.236281471917366, + "grad_norm": 1.0638515218770626, + "learning_rate": 0.00028724999999999996, + "loss": 1.9281, + "step": 1915 + }, + { + "epoch": 1.23692704970949, + "grad_norm": 1.1829199089198266, + "learning_rate": 0.00028739999999999994, + "loss": 2.2923, + "step": 1916 + }, + { + "epoch": 1.237572627501614, + "grad_norm": 1.2176597900789723, + "learning_rate": 0.00028754999999999997, + "loss": 1.9694, + "step": 1917 + }, + { + 
"epoch": 1.2382182052937378, + "grad_norm": 1.1801605694063808, + "learning_rate": 0.00028769999999999995, + "loss": 2.277, + "step": 1918 + }, + { + "epoch": 1.2388637830858618, + "grad_norm": 1.1563118464741708, + "learning_rate": 0.00028785, + "loss": 2.0333, + "step": 1919 + }, + { + "epoch": 1.2395093608779857, + "grad_norm": 1.225589912413854, + "learning_rate": 0.00028799999999999995, + "loss": 2.3662, + "step": 1920 + }, + { + "epoch": 1.2401549386701098, + "grad_norm": 1.616132225969086, + "learning_rate": 0.00028815, + "loss": 2.6454, + "step": 1921 + }, + { + "epoch": 1.2408005164622338, + "grad_norm": 1.3485356262140764, + "learning_rate": 0.00028829999999999996, + "loss": 2.5684, + "step": 1922 + }, + { + "epoch": 1.2414460942543577, + "grad_norm": 1.4176578933832007, + "learning_rate": 0.00028845, + "loss": 2.5643, + "step": 1923 + }, + { + "epoch": 1.2420916720464816, + "grad_norm": 1.4965695070063039, + "learning_rate": 0.00028859999999999997, + "loss": 2.6557, + "step": 1924 + }, + { + "epoch": 1.2427372498386056, + "grad_norm": 1.1421165991192541, + "learning_rate": 0.00028875, + "loss": 2.4381, + "step": 1925 + }, + { + "epoch": 1.2433828276307295, + "grad_norm": 1.3884944690192607, + "learning_rate": 0.0002889, + "loss": 2.3552, + "step": 1926 + }, + { + "epoch": 1.2440284054228534, + "grad_norm": 1.2013162980667378, + "learning_rate": 0.00028905, + "loss": 2.2145, + "step": 1927 + }, + { + "epoch": 1.2446739832149774, + "grad_norm": 1.2344236329496894, + "learning_rate": 0.0002892, + "loss": 2.419, + "step": 1928 + }, + { + "epoch": 1.2453195610071013, + "grad_norm": 1.126184806450743, + "learning_rate": 0.00028934999999999996, + "loss": 2.3596, + "step": 1929 + }, + { + "epoch": 1.2459651387992252, + "grad_norm": 1.131577445710547, + "learning_rate": 0.0002895, + "loss": 1.9906, + "step": 1930 + }, + { + "epoch": 1.2466107165913494, + "grad_norm": 1.4974627585432194, + "learning_rate": 0.00028964999999999997, + "loss": 2.7639, + "step": 1931 + }, + { + "epoch": 1.2472562943834733, + "grad_norm": 1.2091326904017616, + "learning_rate": 0.00028979999999999994, + "loss": 2.4515, + "step": 1932 + }, + { + "epoch": 1.2479018721755972, + "grad_norm": 1.1369539068805037, + "learning_rate": 0.00028995, + "loss": 2.3678, + "step": 1933 + }, + { + "epoch": 1.2485474499677212, + "grad_norm": 1.0558001050189765, + "learning_rate": 0.00029009999999999995, + "loss": 2.2704, + "step": 1934 + }, + { + "epoch": 1.249193027759845, + "grad_norm": 1.122759628869313, + "learning_rate": 0.00029025, + "loss": 1.9843, + "step": 1935 + }, + { + "epoch": 1.249838605551969, + "grad_norm": 1.1512046773645204, + "learning_rate": 0.00029039999999999996, + "loss": 2.3102, + "step": 1936 + }, + { + "epoch": 1.250484183344093, + "grad_norm": 1.2298143110419038, + "learning_rate": 0.00029055, + "loss": 1.9951, + "step": 1937 + }, + { + "epoch": 1.2511297611362169, + "grad_norm": 1.1947987455261393, + "learning_rate": 0.00029069999999999996, + "loss": 2.2431, + "step": 1938 + }, + { + "epoch": 1.2517753389283408, + "grad_norm": 1.2505649299473078, + "learning_rate": 0.00029085, + "loss": 2.7061, + "step": 1939 + }, + { + "epoch": 1.2524209167204647, + "grad_norm": 1.325439751832841, + "learning_rate": 0.00029099999999999997, + "loss": 2.58, + "step": 1940 + }, + { + "epoch": 1.2530664945125887, + "grad_norm": 1.2007084315966305, + "learning_rate": 0.00029115, + "loss": 2.4166, + "step": 1941 + }, + { + "epoch": 1.2537120723047126, + "grad_norm": 1.2967312875311596, + "learning_rate": 0.0002913, + 
"loss": 2.6296, + "step": 1942 + }, + { + "epoch": 1.2543576500968368, + "grad_norm": 1.1851892528321128, + "learning_rate": 0.00029145, + "loss": 2.0472, + "step": 1943 + }, + { + "epoch": 1.2550032278889607, + "grad_norm": 1.3783828785344485, + "learning_rate": 0.0002916, + "loss": 2.3387, + "step": 1944 + }, + { + "epoch": 1.2556488056810846, + "grad_norm": 1.2081923834485218, + "learning_rate": 0.00029174999999999996, + "loss": 2.007, + "step": 1945 + }, + { + "epoch": 1.2562943834732085, + "grad_norm": 1.1808183278658728, + "learning_rate": 0.0002919, + "loss": 2.0168, + "step": 1946 + }, + { + "epoch": 1.2569399612653325, + "grad_norm": 1.2156838256575542, + "learning_rate": 0.00029204999999999997, + "loss": 2.4216, + "step": 1947 + }, + { + "epoch": 1.2575855390574564, + "grad_norm": 1.2097508415048948, + "learning_rate": 0.00029219999999999995, + "loss": 2.4916, + "step": 1948 + }, + { + "epoch": 1.2582311168495803, + "grad_norm": 1.1647176988941657, + "learning_rate": 0.00029235, + "loss": 2.424, + "step": 1949 + }, + { + "epoch": 1.2588766946417043, + "grad_norm": 1.2199636412855366, + "learning_rate": 0.00029249999999999995, + "loss": 2.2098, + "step": 1950 + }, + { + "epoch": 1.2595222724338282, + "grad_norm": 1.1875711132733524, + "learning_rate": 0.00029265, + "loss": 2.3755, + "step": 1951 + }, + { + "epoch": 1.2601678502259523, + "grad_norm": 1.161023406858918, + "learning_rate": 0.00029279999999999996, + "loss": 2.3507, + "step": 1952 + }, + { + "epoch": 1.2608134280180763, + "grad_norm": 1.2290047941774929, + "learning_rate": 0.00029295, + "loss": 2.6138, + "step": 1953 + }, + { + "epoch": 1.2614590058102002, + "grad_norm": 1.0993511841864922, + "learning_rate": 0.00029309999999999997, + "loss": 2.356, + "step": 1954 + }, + { + "epoch": 1.2621045836023241, + "grad_norm": 1.1205285257981534, + "learning_rate": 0.00029325, + "loss": 2.0966, + "step": 1955 + }, + { + "epoch": 1.262750161394448, + "grad_norm": 1.1630095200281139, + "learning_rate": 0.0002934, + "loss": 2.2096, + "step": 1956 + }, + { + "epoch": 1.263395739186572, + "grad_norm": 1.1567284899709578, + "learning_rate": 0.00029355, + "loss": 2.2696, + "step": 1957 + }, + { + "epoch": 1.264041316978696, + "grad_norm": 1.2478043152831224, + "learning_rate": 0.0002937, + "loss": 2.5189, + "step": 1958 + }, + { + "epoch": 1.2646868947708199, + "grad_norm": 1.1001235671077554, + "learning_rate": 0.00029384999999999996, + "loss": 2.3021, + "step": 1959 + }, + { + "epoch": 1.2653324725629438, + "grad_norm": 1.1909245920604543, + "learning_rate": 0.000294, + "loss": 2.4014, + "step": 1960 + }, + { + "epoch": 1.2659780503550677, + "grad_norm": 1.2861175457209357, + "learning_rate": 0.00029414999999999997, + "loss": 2.6293, + "step": 1961 + }, + { + "epoch": 1.2666236281471916, + "grad_norm": 1.1716987684056952, + "learning_rate": 0.00029429999999999994, + "loss": 2.4486, + "step": 1962 + }, + { + "epoch": 1.2672692059393156, + "grad_norm": 1.245622158564618, + "learning_rate": 0.00029445, + "loss": 2.608, + "step": 1963 + }, + { + "epoch": 1.2679147837314395, + "grad_norm": 1.1086055871379719, + "learning_rate": 0.00029459999999999995, + "loss": 2.3981, + "step": 1964 + }, + { + "epoch": 1.2685603615235637, + "grad_norm": 1.1860957566889194, + "learning_rate": 0.00029475, + "loss": 2.1038, + "step": 1965 + }, + { + "epoch": 1.2692059393156876, + "grad_norm": 1.4033045816945473, + "learning_rate": 0.00029489999999999996, + "loss": 2.4889, + "step": 1966 + }, + { + "epoch": 1.2698515171078115, + "grad_norm": 
1.1214447853840193, + "learning_rate": 0.00029505, + "loss": 2.276, + "step": 1967 + }, + { + "epoch": 1.2704970948999355, + "grad_norm": 1.5441156003416618, + "learning_rate": 0.00029519999999999997, + "loss": 2.4729, + "step": 1968 + }, + { + "epoch": 1.2711426726920594, + "grad_norm": 1.3075466096968305, + "learning_rate": 0.00029535, + "loss": 2.3419, + "step": 1969 + }, + { + "epoch": 1.2717882504841833, + "grad_norm": 1.1763828976125668, + "learning_rate": 0.00029549999999999997, + "loss": 2.3367, + "step": 1970 + }, + { + "epoch": 1.2724338282763072, + "grad_norm": 1.3028445305663405, + "learning_rate": 0.00029565, + "loss": 2.4813, + "step": 1971 + }, + { + "epoch": 1.2730794060684312, + "grad_norm": 1.1324698131945212, + "learning_rate": 0.0002958, + "loss": 2.2546, + "step": 1972 + }, + { + "epoch": 1.2737249838605553, + "grad_norm": 1.2779259882742007, + "learning_rate": 0.00029595, + "loss": 2.2969, + "step": 1973 + }, + { + "epoch": 1.2743705616526793, + "grad_norm": 1.1062613767798326, + "learning_rate": 0.0002961, + "loss": 2.1105, + "step": 1974 + }, + { + "epoch": 1.2750161394448032, + "grad_norm": 1.245471751526302, + "learning_rate": 0.00029624999999999996, + "loss": 1.9089, + "step": 1975 + }, + { + "epoch": 1.2756617172369271, + "grad_norm": 1.2535105197525127, + "learning_rate": 0.0002964, + "loss": 2.5115, + "step": 1976 + }, + { + "epoch": 1.276307295029051, + "grad_norm": 1.3387887679965376, + "learning_rate": 0.00029654999999999997, + "loss": 2.5363, + "step": 1977 + }, + { + "epoch": 1.276952872821175, + "grad_norm": 1.2019996804491635, + "learning_rate": 0.00029669999999999995, + "loss": 2.1011, + "step": 1978 + }, + { + "epoch": 1.277598450613299, + "grad_norm": 1.2208462315159379, + "learning_rate": 0.00029685, + "loss": 2.3868, + "step": 1979 + }, + { + "epoch": 1.2782440284054228, + "grad_norm": 1.2483618221752537, + "learning_rate": 0.00029699999999999996, + "loss": 2.7073, + "step": 1980 + }, + { + "epoch": 1.2788896061975468, + "grad_norm": 1.1483892817297567, + "learning_rate": 0.00029715, + "loss": 2.6017, + "step": 1981 + }, + { + "epoch": 1.2795351839896707, + "grad_norm": 1.1433520327968418, + "learning_rate": 0.00029729999999999996, + "loss": 2.472, + "step": 1982 + }, + { + "epoch": 1.2801807617817946, + "grad_norm": 1.260512851787888, + "learning_rate": 0.00029745, + "loss": 2.5426, + "step": 1983 + }, + { + "epoch": 1.2808263395739186, + "grad_norm": 1.1203949208138646, + "learning_rate": 0.00029759999999999997, + "loss": 2.3835, + "step": 1984 + }, + { + "epoch": 1.2814719173660425, + "grad_norm": 1.1082259998067467, + "learning_rate": 0.00029775, + "loss": 2.4487, + "step": 1985 + }, + { + "epoch": 1.2821174951581664, + "grad_norm": 1.1770192895572382, + "learning_rate": 0.0002979, + "loss": 2.3427, + "step": 1986 + }, + { + "epoch": 1.2827630729502906, + "grad_norm": 1.210480105132019, + "learning_rate": 0.00029805, + "loss": 2.5764, + "step": 1987 + }, + { + "epoch": 1.2834086507424145, + "grad_norm": 1.3209626543350923, + "learning_rate": 0.0002982, + "loss": 2.3255, + "step": 1988 + }, + { + "epoch": 1.2840542285345384, + "grad_norm": 1.1547878582066724, + "learning_rate": 0.00029835, + "loss": 2.2678, + "step": 1989 + }, + { + "epoch": 1.2846998063266624, + "grad_norm": 1.0460383155803423, + "learning_rate": 0.0002985, + "loss": 2.2264, + "step": 1990 + }, + { + "epoch": 1.2853453841187863, + "grad_norm": 1.313887871779951, + "learning_rate": 0.00029864999999999997, + "loss": 2.3789, + "step": 1991 + }, + { + "epoch": 1.2859909619109102, + 
"grad_norm": 1.2463087232519634, + "learning_rate": 0.0002988, + "loss": 2.6674, + "step": 1992 + }, + { + "epoch": 1.2866365397030342, + "grad_norm": 1.1693082921010571, + "learning_rate": 0.00029895, + "loss": 2.4255, + "step": 1993 + }, + { + "epoch": 1.287282117495158, + "grad_norm": 1.1096747461483, + "learning_rate": 0.00029909999999999995, + "loss": 2.408, + "step": 1994 + }, + { + "epoch": 1.2879276952872822, + "grad_norm": 1.1173867651950977, + "learning_rate": 0.00029925, + "loss": 2.3924, + "step": 1995 + }, + { + "epoch": 1.2885732730794062, + "grad_norm": 1.126898466314935, + "learning_rate": 0.00029939999999999996, + "loss": 2.4361, + "step": 1996 + }, + { + "epoch": 1.28921885087153, + "grad_norm": 1.1414443350524723, + "learning_rate": 0.00029955, + "loss": 2.3155, + "step": 1997 + }, + { + "epoch": 1.289864428663654, + "grad_norm": 1.0933612822198766, + "learning_rate": 0.00029969999999999997, + "loss": 2.2858, + "step": 1998 + }, + { + "epoch": 1.290510006455778, + "grad_norm": 1.2623722551850662, + "learning_rate": 0.00029985, + "loss": 2.4455, + "step": 1999 + }, + { + "epoch": 1.2911555842479019, + "grad_norm": 1.1501836438588, + "learning_rate": 0.0003, + "loss": 2.0592, + "step": 2000 + }, + { + "epoch": 1.2918011620400258, + "grad_norm": 1.180014832207553, + "learning_rate": 0.0002999999860867213, + "loss": 2.4269, + "step": 2001 + }, + { + "epoch": 1.2924467398321497, + "grad_norm": 1.3154906414680074, + "learning_rate": 0.0002999999443468878, + "loss": 2.5619, + "step": 2002 + }, + { + "epoch": 1.2930923176242737, + "grad_norm": 1.355276798162191, + "learning_rate": 0.0002999998747805073, + "loss": 2.4273, + "step": 2003 + }, + { + "epoch": 1.2937378954163976, + "grad_norm": 1.2052905752738698, + "learning_rate": 0.0002999997773875927, + "loss": 2.3892, + "step": 2004 + }, + { + "epoch": 1.2943834732085215, + "grad_norm": 1.1637429150366583, + "learning_rate": 0.000299999652168162, + "loss": 2.386, + "step": 2005 + }, + { + "epoch": 1.2950290510006455, + "grad_norm": 1.2516489990375987, + "learning_rate": 0.00029999949912223845, + "loss": 2.4471, + "step": 2006 + }, + { + "epoch": 1.2956746287927694, + "grad_norm": 1.1509973451718716, + "learning_rate": 0.0002999993182498504, + "loss": 2.2703, + "step": 2007 + }, + { + "epoch": 1.2963202065848936, + "grad_norm": 1.270933943703333, + "learning_rate": 0.00029999910955103156, + "loss": 2.6034, + "step": 2008 + }, + { + "epoch": 1.2969657843770175, + "grad_norm": 1.3286989942279075, + "learning_rate": 0.0002999988730258205, + "loss": 2.5796, + "step": 2009 + }, + { + "epoch": 1.2976113621691414, + "grad_norm": 1.2732558079203993, + "learning_rate": 0.0002999986086742611, + "loss": 2.6423, + "step": 2010 + }, + { + "epoch": 1.2982569399612653, + "grad_norm": 1.3084667205474794, + "learning_rate": 0.0002999983164964025, + "loss": 1.9616, + "step": 2011 + }, + { + "epoch": 1.2989025177533893, + "grad_norm": 1.1002118374894803, + "learning_rate": 0.0002999979964922988, + "loss": 2.2171, + "step": 2012 + }, + { + "epoch": 1.2995480955455132, + "grad_norm": 1.0681981556067182, + "learning_rate": 0.00029999764866200945, + "loss": 2.3629, + "step": 2013 + }, + { + "epoch": 1.3001936733376371, + "grad_norm": 1.0246411760552951, + "learning_rate": 0.00029999727300559896, + "loss": 2.0712, + "step": 2014 + }, + { + "epoch": 1.300839251129761, + "grad_norm": 1.1586286253372176, + "learning_rate": 0.0002999968695231369, + "loss": 2.2747, + "step": 2015 + }, + { + "epoch": 1.3014848289218852, + "grad_norm": 1.056958778521859, + 
"learning_rate": 0.00029999643821469827, + "loss": 2.0124, + "step": 2016 + }, + { + "epoch": 1.3021304067140091, + "grad_norm": 1.2358435113090165, + "learning_rate": 0.000299995979080363, + "loss": 2.3068, + "step": 2017 + }, + { + "epoch": 1.302775984506133, + "grad_norm": 1.1700234129820164, + "learning_rate": 0.0002999954921202163, + "loss": 2.5638, + "step": 2018 + }, + { + "epoch": 1.303421562298257, + "grad_norm": 1.1849467197569223, + "learning_rate": 0.00029999497733434853, + "loss": 2.5917, + "step": 2019 + }, + { + "epoch": 1.304067140090381, + "grad_norm": 1.1325232377052659, + "learning_rate": 0.0002999944347228551, + "loss": 2.4332, + "step": 2020 + }, + { + "epoch": 1.3047127178825049, + "grad_norm": 1.1087865049331849, + "learning_rate": 0.0002999938642858367, + "loss": 2.465, + "step": 2021 + }, + { + "epoch": 1.3053582956746288, + "grad_norm": 1.2317038374235618, + "learning_rate": 0.00029999326602339924, + "loss": 2.3609, + "step": 2022 + }, + { + "epoch": 1.3060038734667527, + "grad_norm": 1.0533600694195424, + "learning_rate": 0.0002999926399356536, + "loss": 2.0781, + "step": 2023 + }, + { + "epoch": 1.3066494512588767, + "grad_norm": 1.082235018675412, + "learning_rate": 0.000299991986022716, + "loss": 2.4444, + "step": 2024 + }, + { + "epoch": 1.3072950290510006, + "grad_norm": 1.2252095480890948, + "learning_rate": 0.00029999130428470767, + "loss": 2.4347, + "step": 2025 + }, + { + "epoch": 1.3079406068431245, + "grad_norm": 1.14058631049766, + "learning_rate": 0.00029999059472175517, + "loss": 2.012, + "step": 2026 + }, + { + "epoch": 1.3085861846352485, + "grad_norm": 1.2244828353962431, + "learning_rate": 0.00029998985733399006, + "loss": 2.2806, + "step": 2027 + }, + { + "epoch": 1.3092317624273724, + "grad_norm": 1.247915792516264, + "learning_rate": 0.00029998909212154907, + "loss": 2.4699, + "step": 2028 + }, + { + "epoch": 1.3098773402194963, + "grad_norm": 1.233672681617502, + "learning_rate": 0.00029998829908457433, + "loss": 2.3918, + "step": 2029 + }, + { + "epoch": 1.3105229180116205, + "grad_norm": 1.186757722789274, + "learning_rate": 0.00029998747822321286, + "loss": 2.4247, + "step": 2030 + }, + { + "epoch": 1.3111684958037444, + "grad_norm": 1.3104794041737475, + "learning_rate": 0.00029998662953761694, + "loss": 2.0576, + "step": 2031 + }, + { + "epoch": 1.3118140735958683, + "grad_norm": 1.230172741304354, + "learning_rate": 0.000299985753027944, + "loss": 2.5205, + "step": 2032 + }, + { + "epoch": 1.3124596513879923, + "grad_norm": 1.0380246839295455, + "learning_rate": 0.00029998484869435665, + "loss": 2.1112, + "step": 2033 + }, + { + "epoch": 1.3131052291801162, + "grad_norm": 1.2333259886487704, + "learning_rate": 0.00029998391653702274, + "loss": 2.4158, + "step": 2034 + }, + { + "epoch": 1.3137508069722401, + "grad_norm": 1.1833064251569514, + "learning_rate": 0.00029998295655611504, + "loss": 2.5265, + "step": 2035 + }, + { + "epoch": 1.314396384764364, + "grad_norm": 1.1150825432370617, + "learning_rate": 0.00029998196875181173, + "loss": 2.2048, + "step": 2036 + }, + { + "epoch": 1.315041962556488, + "grad_norm": 1.0892544719861115, + "learning_rate": 0.000299980953124296, + "loss": 2.314, + "step": 2037 + }, + { + "epoch": 1.3156875403486121, + "grad_norm": 1.1427465047970358, + "learning_rate": 0.0002999799096737564, + "loss": 2.2809, + "step": 2038 + }, + { + "epoch": 1.316333118140736, + "grad_norm": 1.1024816382104234, + "learning_rate": 0.0002999788384003863, + "loss": 2.2809, + "step": 2039 + }, + { + "epoch": 1.31697869593286, 
+ "grad_norm": 1.1773644188268062, + "learning_rate": 0.0002999777393043845, + "loss": 2.2955, + "step": 2040 + }, + { + "epoch": 1.317624273724984, + "grad_norm": 1.350329497541108, + "learning_rate": 0.000299976612385955, + "loss": 2.4844, + "step": 2041 + }, + { + "epoch": 1.3182698515171078, + "grad_norm": 1.1825639025023464, + "learning_rate": 0.0002999754576453068, + "loss": 2.3639, + "step": 2042 + }, + { + "epoch": 1.3189154293092318, + "grad_norm": 1.2760744372792516, + "learning_rate": 0.0002999742750826541, + "loss": 2.5813, + "step": 2043 + }, + { + "epoch": 1.3195610071013557, + "grad_norm": 1.250124826072092, + "learning_rate": 0.0002999730646982162, + "loss": 2.5937, + "step": 2044 + }, + { + "epoch": 1.3202065848934796, + "grad_norm": 1.2861755201058915, + "learning_rate": 0.00029997182649221777, + "loss": 2.3033, + "step": 2045 + }, + { + "epoch": 1.3208521626856036, + "grad_norm": 1.161307452497164, + "learning_rate": 0.00029997056046488846, + "loss": 2.5636, + "step": 2046 + }, + { + "epoch": 1.3214977404777275, + "grad_norm": 1.0785068531013873, + "learning_rate": 0.0002999692666164631, + "loss": 2.3441, + "step": 2047 + }, + { + "epoch": 1.3221433182698514, + "grad_norm": 1.179931748482674, + "learning_rate": 0.00029996794494718175, + "loss": 2.4865, + "step": 2048 + }, + { + "epoch": 1.3227888960619754, + "grad_norm": 1.1708991890910843, + "learning_rate": 0.0002999665954572896, + "loss": 2.5125, + "step": 2049 + }, + { + "epoch": 1.3234344738540993, + "grad_norm": 1.1055465076209368, + "learning_rate": 0.00029996521814703697, + "loss": 2.4423, + "step": 2050 + }, + { + "epoch": 1.3240800516462234, + "grad_norm": 1.1006816606920624, + "learning_rate": 0.0002999638130166793, + "loss": 1.9833, + "step": 2051 + }, + { + "epoch": 1.3247256294383474, + "grad_norm": 1.1339615261769358, + "learning_rate": 0.0002999623800664774, + "loss": 2.219, + "step": 2052 + }, + { + "epoch": 1.3253712072304713, + "grad_norm": 1.1854859202806216, + "learning_rate": 0.00029996091929669703, + "loss": 2.0273, + "step": 2053 + }, + { + "epoch": 1.3260167850225952, + "grad_norm": 1.2347046849196024, + "learning_rate": 0.0002999594307076091, + "loss": 2.4735, + "step": 2054 + }, + { + "epoch": 1.3266623628147192, + "grad_norm": 1.2287627030821322, + "learning_rate": 0.00029995791429948985, + "loss": 2.2668, + "step": 2055 + }, + { + "epoch": 1.327307940606843, + "grad_norm": 1.2025961165262888, + "learning_rate": 0.00029995637007262065, + "loss": 2.4218, + "step": 2056 + }, + { + "epoch": 1.327953518398967, + "grad_norm": 1.2075451237428338, + "learning_rate": 0.0002999547980272878, + "loss": 2.3268, + "step": 2057 + }, + { + "epoch": 1.328599096191091, + "grad_norm": 1.0656865015539345, + "learning_rate": 0.00029995319816378306, + "loss": 2.3608, + "step": 2058 + }, + { + "epoch": 1.329244673983215, + "grad_norm": 1.120238637976705, + "learning_rate": 0.0002999515704824032, + "loss": 2.3282, + "step": 2059 + }, + { + "epoch": 1.329890251775339, + "grad_norm": 1.179781056524266, + "learning_rate": 0.0002999499149834501, + "loss": 2.2202, + "step": 2060 + }, + { + "epoch": 1.330535829567463, + "grad_norm": 1.0876432976283648, + "learning_rate": 0.00029994823166723094, + "loss": 2.2374, + "step": 2061 + }, + { + "epoch": 1.331181407359587, + "grad_norm": 1.1191366818914277, + "learning_rate": 0.00029994652053405807, + "loss": 2.3248, + "step": 2062 + }, + { + "epoch": 1.3318269851517108, + "grad_norm": 1.2492011668402803, + "learning_rate": 0.0002999447815842487, + "loss": 2.5158, + "step": 2063 
+ }, + { + "epoch": 1.3324725629438348, + "grad_norm": 1.1889795244759762, + "learning_rate": 0.0002999430148181257, + "loss": 2.2396, + "step": 2064 + }, + { + "epoch": 1.3331181407359587, + "grad_norm": 1.1232981321455688, + "learning_rate": 0.00029994122023601656, + "loss": 2.0043, + "step": 2065 + }, + { + "epoch": 1.3337637185280826, + "grad_norm": 1.1434296646521056, + "learning_rate": 0.0002999393978382543, + "loss": 2.2088, + "step": 2066 + }, + { + "epoch": 1.3344092963202066, + "grad_norm": 1.121153388993566, + "learning_rate": 0.00029993754762517704, + "loss": 2.2854, + "step": 2067 + }, + { + "epoch": 1.3350548741123305, + "grad_norm": 1.3514615311728182, + "learning_rate": 0.00029993566959712805, + "loss": 2.575, + "step": 2068 + }, + { + "epoch": 1.3357004519044544, + "grad_norm": 1.0605946962671096, + "learning_rate": 0.00029993376375445557, + "loss": 2.3809, + "step": 2069 + }, + { + "epoch": 1.3363460296965783, + "grad_norm": 1.17253672930298, + "learning_rate": 0.00029993183009751327, + "loss": 2.0657, + "step": 2070 + }, + { + "epoch": 1.3369916074887023, + "grad_norm": 1.1246331819968713, + "learning_rate": 0.0002999298686266598, + "loss": 2.314, + "step": 2071 + }, + { + "epoch": 1.3376371852808262, + "grad_norm": 1.0517747410856086, + "learning_rate": 0.0002999278793422591, + "loss": 2.2135, + "step": 2072 + }, + { + "epoch": 1.3382827630729504, + "grad_norm": 1.117563794204413, + "learning_rate": 0.00029992586224468015, + "loss": 2.4883, + "step": 2073 + }, + { + "epoch": 1.3389283408650743, + "grad_norm": 1.0662647452728131, + "learning_rate": 0.00029992381733429715, + "loss": 2.3178, + "step": 2074 + }, + { + "epoch": 1.3395739186571982, + "grad_norm": 1.3424081886125554, + "learning_rate": 0.00029992174461148947, + "loss": 2.3889, + "step": 2075 + }, + { + "epoch": 1.3402194964493221, + "grad_norm": 1.3389031749225888, + "learning_rate": 0.0002999196440766416, + "loss": 2.8141, + "step": 2076 + }, + { + "epoch": 1.340865074241446, + "grad_norm": 1.4846946819352742, + "learning_rate": 0.00029991751573014324, + "loss": 2.5226, + "step": 2077 + }, + { + "epoch": 1.34151065203357, + "grad_norm": 1.2078660131197005, + "learning_rate": 0.0002999153595723892, + "loss": 2.5357, + "step": 2078 + }, + { + "epoch": 1.342156229825694, + "grad_norm": 1.328268058080377, + "learning_rate": 0.00029991317560377947, + "loss": 2.6765, + "step": 2079 + }, + { + "epoch": 1.3428018076178179, + "grad_norm": 1.2042950125358116, + "learning_rate": 0.0002999109638247192, + "loss": 2.4786, + "step": 2080 + }, + { + "epoch": 1.343447385409942, + "grad_norm": 1.222217012135496, + "learning_rate": 0.00029990872423561873, + "loss": 2.4892, + "step": 2081 + }, + { + "epoch": 1.344092963202066, + "grad_norm": 1.1863572325070966, + "learning_rate": 0.00029990645683689347, + "loss": 2.1855, + "step": 2082 + }, + { + "epoch": 1.3447385409941899, + "grad_norm": 1.1586933200209346, + "learning_rate": 0.0002999041616289641, + "loss": 2.2732, + "step": 2083 + }, + { + "epoch": 1.3453841187863138, + "grad_norm": 1.2320910161541325, + "learning_rate": 0.00029990183861225634, + "loss": 2.5233, + "step": 2084 + }, + { + "epoch": 1.3460296965784377, + "grad_norm": 1.3331102504011043, + "learning_rate": 0.0002998994877872012, + "loss": 2.4615, + "step": 2085 + }, + { + "epoch": 1.3466752743705617, + "grad_norm": 1.0887086585517607, + "learning_rate": 0.00029989710915423473, + "loss": 2.5081, + "step": 2086 + }, + { + "epoch": 1.3473208521626856, + "grad_norm": 1.147136978675252, + "learning_rate": 
0.0002998947027137982, + "loss": 2.4854, + "step": 2087 + }, + { + "epoch": 1.3479664299548095, + "grad_norm": 1.0992394544496966, + "learning_rate": 0.00029989226846633814, + "loss": 2.2114, + "step": 2088 + }, + { + "epoch": 1.3486120077469335, + "grad_norm": 1.305709092847088, + "learning_rate": 0.0002998898064123059, + "loss": 2.4888, + "step": 2089 + }, + { + "epoch": 1.3492575855390574, + "grad_norm": 1.1744318942776468, + "learning_rate": 0.00029988731655215844, + "loss": 2.1273, + "step": 2090 + }, + { + "epoch": 1.3499031633311813, + "grad_norm": 1.19882518394649, + "learning_rate": 0.00029988479888635756, + "loss": 2.2825, + "step": 2091 + }, + { + "epoch": 1.3505487411233053, + "grad_norm": 1.2154356714041192, + "learning_rate": 0.0002998822534153703, + "loss": 2.54, + "step": 2092 + }, + { + "epoch": 1.3511943189154292, + "grad_norm": 1.1722968266664064, + "learning_rate": 0.0002998796801396689, + "loss": 2.3544, + "step": 2093 + }, + { + "epoch": 1.3518398967075533, + "grad_norm": 1.1074797841295092, + "learning_rate": 0.00029987707905973076, + "loss": 2.2205, + "step": 2094 + }, + { + "epoch": 1.3524854744996773, + "grad_norm": 1.2195103023933465, + "learning_rate": 0.00029987445017603826, + "loss": 2.3414, + "step": 2095 + }, + { + "epoch": 1.3531310522918012, + "grad_norm": 1.070945355937561, + "learning_rate": 0.0002998717934890793, + "loss": 2.3855, + "step": 2096 + }, + { + "epoch": 1.3537766300839251, + "grad_norm": 1.1929405426290094, + "learning_rate": 0.0002998691089993465, + "loss": 2.4942, + "step": 2097 + }, + { + "epoch": 1.354422207876049, + "grad_norm": 1.183434573643989, + "learning_rate": 0.0002998663967073381, + "loss": 2.4758, + "step": 2098 + }, + { + "epoch": 1.355067785668173, + "grad_norm": 1.0677644330602367, + "learning_rate": 0.0002998636566135571, + "loss": 2.4132, + "step": 2099 + }, + { + "epoch": 1.355713363460297, + "grad_norm": 1.1409615155513408, + "learning_rate": 0.00029986088871851175, + "loss": 2.5552, + "step": 2100 + }, + { + "epoch": 1.3563589412524208, + "grad_norm": 1.0639894808962174, + "learning_rate": 0.0002998580930227157, + "loss": 2.2614, + "step": 2101 + }, + { + "epoch": 1.357004519044545, + "grad_norm": 1.2489176155984083, + "learning_rate": 0.0002998552695266875, + "loss": 2.337, + "step": 2102 + }, + { + "epoch": 1.357650096836669, + "grad_norm": 1.1067326130992645, + "learning_rate": 0.00029985241823095093, + "loss": 2.2197, + "step": 2103 + }, + { + "epoch": 1.3582956746287929, + "grad_norm": 1.182972875421993, + "learning_rate": 0.000299849539136035, + "loss": 2.6118, + "step": 2104 + }, + { + "epoch": 1.3589412524209168, + "grad_norm": 1.160469993004198, + "learning_rate": 0.0002998466322424737, + "loss": 2.3199, + "step": 2105 + }, + { + "epoch": 1.3595868302130407, + "grad_norm": 1.3228580444271347, + "learning_rate": 0.00029984369755080634, + "loss": 2.4743, + "step": 2106 + }, + { + "epoch": 1.3602324080051647, + "grad_norm": 1.123389267240516, + "learning_rate": 0.0002998407350615774, + "loss": 2.67, + "step": 2107 + }, + { + "epoch": 1.3608779857972886, + "grad_norm": 1.1814210890574608, + "learning_rate": 0.00029983774477533636, + "loss": 2.5368, + "step": 2108 + }, + { + "epoch": 1.3615235635894125, + "grad_norm": 1.1915638737706262, + "learning_rate": 0.00029983472669263794, + "loss": 2.3762, + "step": 2109 + }, + { + "epoch": 1.3621691413815364, + "grad_norm": 1.1977528460540614, + "learning_rate": 0.00029983168081404216, + "loss": 2.3824, + "step": 2110 + }, + { + "epoch": 1.3628147191736604, + "grad_norm": 
1.3255332852916282, + "learning_rate": 0.0002998286071401139, + "loss": 2.6903, + "step": 2111 + }, + { + "epoch": 1.3634602969657843, + "grad_norm": 1.2548103608121397, + "learning_rate": 0.00029982550567142345, + "loss": 2.3405, + "step": 2112 + }, + { + "epoch": 1.3641058747579082, + "grad_norm": 1.1698463369263574, + "learning_rate": 0.00029982237640854614, + "loss": 2.2566, + "step": 2113 + }, + { + "epoch": 1.3647514525500322, + "grad_norm": 1.176360302232732, + "learning_rate": 0.0002998192193520625, + "loss": 2.6341, + "step": 2114 + }, + { + "epoch": 1.365397030342156, + "grad_norm": 1.1913845375469556, + "learning_rate": 0.0002998160345025582, + "loss": 2.3895, + "step": 2115 + }, + { + "epoch": 1.3660426081342802, + "grad_norm": 1.2254658910080147, + "learning_rate": 0.00029981282186062404, + "loss": 2.4104, + "step": 2116 + }, + { + "epoch": 1.3666881859264042, + "grad_norm": 1.341243699404432, + "learning_rate": 0.000299809581426856, + "loss": 2.6645, + "step": 2117 + }, + { + "epoch": 1.367333763718528, + "grad_norm": 1.1322313474922956, + "learning_rate": 0.0002998063132018552, + "loss": 2.3905, + "step": 2118 + }, + { + "epoch": 1.367979341510652, + "grad_norm": 1.205501484352292, + "learning_rate": 0.00029980301718622793, + "loss": 2.1232, + "step": 2119 + }, + { + "epoch": 1.368624919302776, + "grad_norm": 1.105718315578568, + "learning_rate": 0.00029979969338058565, + "loss": 2.4647, + "step": 2120 + }, + { + "epoch": 1.3692704970949, + "grad_norm": 1.1503636020869457, + "learning_rate": 0.00029979634178554503, + "loss": 2.1955, + "step": 2121 + }, + { + "epoch": 1.3699160748870238, + "grad_norm": 1.1142210077680594, + "learning_rate": 0.0002997929624017277, + "loss": 2.4127, + "step": 2122 + }, + { + "epoch": 1.3705616526791478, + "grad_norm": 1.1230571365080912, + "learning_rate": 0.00029978955522976067, + "loss": 2.4119, + "step": 2123 + }, + { + "epoch": 1.371207230471272, + "grad_norm": 1.1086336522910214, + "learning_rate": 0.00029978612027027596, + "loss": 2.3832, + "step": 2124 + }, + { + "epoch": 1.3718528082633958, + "grad_norm": 1.0661250039296724, + "learning_rate": 0.00029978265752391076, + "loss": 2.3443, + "step": 2125 + }, + { + "epoch": 1.3724983860555198, + "grad_norm": 1.1802803210749075, + "learning_rate": 0.0002997791669913075, + "loss": 2.3635, + "step": 2126 + }, + { + "epoch": 1.3731439638476437, + "grad_norm": 1.1372909610427837, + "learning_rate": 0.0002997756486731137, + "loss": 2.4736, + "step": 2127 + }, + { + "epoch": 1.3737895416397676, + "grad_norm": 1.1116306997699081, + "learning_rate": 0.00029977210256998204, + "loss": 2.1171, + "step": 2128 + }, + { + "epoch": 1.3744351194318916, + "grad_norm": 1.0773862693066705, + "learning_rate": 0.00029976852868257033, + "loss": 1.8559, + "step": 2129 + }, + { + "epoch": 1.3750806972240155, + "grad_norm": 1.1953755877928118, + "learning_rate": 0.00029976492701154164, + "loss": 2.4638, + "step": 2130 + }, + { + "epoch": 1.3757262750161394, + "grad_norm": 1.217173118167107, + "learning_rate": 0.000299761297557564, + "loss": 2.4839, + "step": 2131 + }, + { + "epoch": 1.3763718528082634, + "grad_norm": 1.104989182486241, + "learning_rate": 0.00029975764032131084, + "loss": 2.1633, + "step": 2132 + }, + { + "epoch": 1.3770174306003873, + "grad_norm": 1.2428722605014515, + "learning_rate": 0.0002997539553034605, + "loss": 2.5983, + "step": 2133 + }, + { + "epoch": 1.3776630083925112, + "grad_norm": 1.1482893524157318, + "learning_rate": 0.00029975024250469666, + "loss": 2.3571, + "step": 2134 + }, + { + 
"epoch": 1.3783085861846351, + "grad_norm": 1.0951918828938016, + "learning_rate": 0.000299746501925708, + "loss": 2.2052, + "step": 2135 + }, + { + "epoch": 1.378954163976759, + "grad_norm": 1.2147983930957622, + "learning_rate": 0.00029974273356718863, + "loss": 2.2909, + "step": 2136 + }, + { + "epoch": 1.3795997417688832, + "grad_norm": 1.181936554086293, + "learning_rate": 0.0002997389374298374, + "loss": 2.5534, + "step": 2137 + }, + { + "epoch": 1.3802453195610072, + "grad_norm": 1.100208715906329, + "learning_rate": 0.0002997351135143587, + "loss": 2.1993, + "step": 2138 + }, + { + "epoch": 1.380890897353131, + "grad_norm": 1.178525872123655, + "learning_rate": 0.0002997312618214617, + "loss": 2.5118, + "step": 2139 + }, + { + "epoch": 1.381536475145255, + "grad_norm": 1.1921884300207772, + "learning_rate": 0.0002997273823518612, + "loss": 2.5765, + "step": 2140 + }, + { + "epoch": 1.382182052937379, + "grad_norm": 1.1424256922223073, + "learning_rate": 0.00029972347510627664, + "loss": 2.4575, + "step": 2141 + }, + { + "epoch": 1.3828276307295029, + "grad_norm": 1.1716302067630242, + "learning_rate": 0.00029971954008543297, + "loss": 2.263, + "step": 2142 + }, + { + "epoch": 1.3834732085216268, + "grad_norm": 1.113442822487553, + "learning_rate": 0.0002997155772900602, + "loss": 2.4779, + "step": 2143 + }, + { + "epoch": 1.3841187863137507, + "grad_norm": 1.133411713275566, + "learning_rate": 0.0002997115867208934, + "loss": 2.4472, + "step": 2144 + }, + { + "epoch": 1.384764364105875, + "grad_norm": 1.1787657386950543, + "learning_rate": 0.0002997075683786729, + "loss": 2.4753, + "step": 2145 + }, + { + "epoch": 1.3854099418979988, + "grad_norm": 1.0990400883128193, + "learning_rate": 0.00029970352226414416, + "loss": 2.2328, + "step": 2146 + }, + { + "epoch": 1.3860555196901228, + "grad_norm": 1.1284796663055974, + "learning_rate": 0.0002996994483780577, + "loss": 1.9578, + "step": 2147 + }, + { + "epoch": 1.3867010974822467, + "grad_norm": 1.2217793999790936, + "learning_rate": 0.0002996953467211694, + "loss": 2.3998, + "step": 2148 + }, + { + "epoch": 1.3873466752743706, + "grad_norm": 1.2520596327232623, + "learning_rate": 0.00029969121729424004, + "loss": 2.2602, + "step": 2149 + }, + { + "epoch": 1.3879922530664945, + "grad_norm": 1.2665980072948133, + "learning_rate": 0.00029968706009803574, + "loss": 2.5958, + "step": 2150 + }, + { + "epoch": 1.3886378308586185, + "grad_norm": 1.1838034583681973, + "learning_rate": 0.00029968287513332763, + "loss": 2.4286, + "step": 2151 + }, + { + "epoch": 1.3892834086507424, + "grad_norm": 1.1551974552944704, + "learning_rate": 0.0002996786624008921, + "loss": 2.4938, + "step": 2152 + }, + { + "epoch": 1.3899289864428663, + "grad_norm": 1.0403293892560141, + "learning_rate": 0.00029967442190151073, + "loss": 2.3125, + "step": 2153 + }, + { + "epoch": 1.3905745642349903, + "grad_norm": 1.2335868269742862, + "learning_rate": 0.0002996701536359701, + "loss": 2.6838, + "step": 2154 + }, + { + "epoch": 1.3912201420271142, + "grad_norm": 1.0862248092956277, + "learning_rate": 0.000299665857605062, + "loss": 2.3186, + "step": 2155 + }, + { + "epoch": 1.3918657198192381, + "grad_norm": 1.2245677503594004, + "learning_rate": 0.0002996615338095834, + "loss": 2.502, + "step": 2156 + }, + { + "epoch": 1.392511297611362, + "grad_norm": 1.0897526900450463, + "learning_rate": 0.0002996571822503365, + "loss": 2.2477, + "step": 2157 + }, + { + "epoch": 1.393156875403486, + "grad_norm": 1.2195238901260372, + "learning_rate": 0.0002996528029281284, + 
"loss": 2.6921, + "step": 2158 + }, + { + "epoch": 1.3938024531956101, + "grad_norm": 1.0845887056132557, + "learning_rate": 0.0002996483958437717, + "loss": 2.353, + "step": 2159 + }, + { + "epoch": 1.394448030987734, + "grad_norm": 1.1550208598752059, + "learning_rate": 0.0002996439609980838, + "loss": 2.4741, + "step": 2160 + }, + { + "epoch": 1.395093608779858, + "grad_norm": 1.0555307367134192, + "learning_rate": 0.0002996394983918874, + "loss": 2.2295, + "step": 2161 + }, + { + "epoch": 1.395739186571982, + "grad_norm": 1.1085300646544913, + "learning_rate": 0.00029963500802601055, + "loss": 2.2351, + "step": 2162 + }, + { + "epoch": 1.3963847643641059, + "grad_norm": 1.1237653650657984, + "learning_rate": 0.0002996304899012861, + "loss": 2.2278, + "step": 2163 + }, + { + "epoch": 1.3970303421562298, + "grad_norm": 1.326629289183326, + "learning_rate": 0.00029962594401855216, + "loss": 2.6681, + "step": 2164 + }, + { + "epoch": 1.3976759199483537, + "grad_norm": 1.1849137958481635, + "learning_rate": 0.00029962137037865225, + "loss": 2.3725, + "step": 2165 + }, + { + "epoch": 1.3983214977404776, + "grad_norm": 1.1553579943635688, + "learning_rate": 0.00029961676898243464, + "loss": 2.4873, + "step": 2166 + }, + { + "epoch": 1.3989670755326018, + "grad_norm": 1.2291416013190255, + "learning_rate": 0.000299612139830753, + "loss": 2.438, + "step": 2167 + }, + { + "epoch": 1.3996126533247257, + "grad_norm": 1.2038908269318636, + "learning_rate": 0.00029960748292446606, + "loss": 2.5618, + "step": 2168 + }, + { + "epoch": 1.4002582311168497, + "grad_norm": 1.1170239418134313, + "learning_rate": 0.0002996027982644378, + "loss": 2.5098, + "step": 2169 + }, + { + "epoch": 1.4009038089089736, + "grad_norm": 1.0685561821209653, + "learning_rate": 0.00029959808585153713, + "loss": 2.2836, + "step": 2170 + }, + { + "epoch": 1.4015493867010975, + "grad_norm": 1.1648709109879463, + "learning_rate": 0.00029959334568663846, + "loss": 2.3866, + "step": 2171 + }, + { + "epoch": 1.4021949644932215, + "grad_norm": 1.1349074454365593, + "learning_rate": 0.00029958857777062094, + "loss": 2.4667, + "step": 2172 + }, + { + "epoch": 1.4028405422853454, + "grad_norm": 1.3007097264107497, + "learning_rate": 0.0002995837821043692, + "loss": 2.5096, + "step": 2173 + }, + { + "epoch": 1.4034861200774693, + "grad_norm": 1.0592187231764658, + "learning_rate": 0.00029957895868877284, + "loss": 2.2112, + "step": 2174 + }, + { + "epoch": 1.4041316978695932, + "grad_norm": 1.2344762965050522, + "learning_rate": 0.0002995741075247266, + "loss": 2.4773, + "step": 2175 + }, + { + "epoch": 1.4047772756617172, + "grad_norm": 1.1272551608660337, + "learning_rate": 0.0002995692286131305, + "loss": 2.5136, + "step": 2176 + }, + { + "epoch": 1.405422853453841, + "grad_norm": 1.086975519300901, + "learning_rate": 0.0002995643219548896, + "loss": 2.3737, + "step": 2177 + }, + { + "epoch": 1.406068431245965, + "grad_norm": 1.2651403341085006, + "learning_rate": 0.0002995593875509141, + "loss": 2.514, + "step": 2178 + }, + { + "epoch": 1.406714009038089, + "grad_norm": 1.1566216915850616, + "learning_rate": 0.0002995544254021195, + "loss": 2.5295, + "step": 2179 + }, + { + "epoch": 1.4073595868302131, + "grad_norm": 1.0121111720717375, + "learning_rate": 0.0002995494355094262, + "loss": 2.2044, + "step": 2180 + }, + { + "epoch": 1.408005164622337, + "grad_norm": 1.2514788807527946, + "learning_rate": 0.0002995444178737599, + "loss": 2.4208, + "step": 2181 + }, + { + "epoch": 1.408650742414461, + "grad_norm": 1.3302900503263848, + 
"learning_rate": 0.0002995393724960515, + "loss": 2.6851, + "step": 2182 + }, + { + "epoch": 1.409296320206585, + "grad_norm": 1.1208552162936902, + "learning_rate": 0.00029953429937723693, + "loss": 2.4531, + "step": 2183 + }, + { + "epoch": 1.4099418979987088, + "grad_norm": 1.1909773659815766, + "learning_rate": 0.00029952919851825726, + "loss": 1.8804, + "step": 2184 + }, + { + "epoch": 1.4105874757908328, + "grad_norm": 1.3750535801075932, + "learning_rate": 0.0002995240699200588, + "loss": 2.662, + "step": 2185 + }, + { + "epoch": 1.4112330535829567, + "grad_norm": 1.199798885939508, + "learning_rate": 0.0002995189135835929, + "loss": 1.8909, + "step": 2186 + }, + { + "epoch": 1.4118786313750806, + "grad_norm": 1.0928579857939882, + "learning_rate": 0.0002995137295098162, + "loss": 2.1549, + "step": 2187 + }, + { + "epoch": 1.4125242091672048, + "grad_norm": 1.0434404155631367, + "learning_rate": 0.0002995085176996904, + "loss": 2.2148, + "step": 2188 + }, + { + "epoch": 1.4131697869593287, + "grad_norm": 1.1965982450093544, + "learning_rate": 0.00029950327815418226, + "loss": 2.3484, + "step": 2189 + }, + { + "epoch": 1.4138153647514526, + "grad_norm": 1.3484469585954186, + "learning_rate": 0.0002994980108742638, + "loss": 2.352, + "step": 2190 + }, + { + "epoch": 1.4144609425435766, + "grad_norm": 1.3026352451313485, + "learning_rate": 0.00029949271586091227, + "loss": 2.4238, + "step": 2191 + }, + { + "epoch": 1.4151065203357005, + "grad_norm": 1.3190143335533893, + "learning_rate": 0.0002994873931151097, + "loss": 2.5679, + "step": 2192 + }, + { + "epoch": 1.4157520981278244, + "grad_norm": 1.1648740462655216, + "learning_rate": 0.0002994820426378438, + "loss": 2.2137, + "step": 2193 + }, + { + "epoch": 1.4163976759199484, + "grad_norm": 1.231421890943682, + "learning_rate": 0.0002994766644301069, + "loss": 2.3527, + "step": 2194 + }, + { + "epoch": 1.4170432537120723, + "grad_norm": 1.3576775043438642, + "learning_rate": 0.0002994712584928969, + "loss": 2.4288, + "step": 2195 + }, + { + "epoch": 1.4176888315041962, + "grad_norm": 1.1806481379453035, + "learning_rate": 0.0002994658248272165, + "loss": 2.338, + "step": 2196 + }, + { + "epoch": 1.4183344092963202, + "grad_norm": 1.135005333591236, + "learning_rate": 0.0002994603634340739, + "loss": 2.3333, + "step": 2197 + }, + { + "epoch": 1.418979987088444, + "grad_norm": 1.402790929621036, + "learning_rate": 0.000299454874314482, + "loss": 2.5466, + "step": 2198 + }, + { + "epoch": 1.419625564880568, + "grad_norm": 1.383722159863877, + "learning_rate": 0.0002994493574694592, + "loss": 2.5079, + "step": 2199 + }, + { + "epoch": 1.420271142672692, + "grad_norm": 1.0829411978924237, + "learning_rate": 0.000299443812900029, + "loss": 2.1198, + "step": 2200 + }, + { + "epoch": 1.4209167204648159, + "grad_norm": 1.1037759925709525, + "learning_rate": 0.00029943824060721993, + "loss": 2.3442, + "step": 2201 + }, + { + "epoch": 1.42156229825694, + "grad_norm": 1.1899620268443987, + "learning_rate": 0.00029943264059206564, + "loss": 2.418, + "step": 2202 + }, + { + "epoch": 1.422207876049064, + "grad_norm": 1.1063095342955167, + "learning_rate": 0.0002994270128556051, + "loss": 2.3332, + "step": 2203 + }, + { + "epoch": 1.4228534538411879, + "grad_norm": 1.2056037168439655, + "learning_rate": 0.0002994213573988822, + "loss": 1.9821, + "step": 2204 + }, + { + "epoch": 1.4234990316333118, + "grad_norm": 1.150540004062052, + "learning_rate": 0.0002994156742229462, + "loss": 2.4926, + "step": 2205 + }, + { + "epoch": 1.4241446094254357, + 
"grad_norm": 1.1412738718418745, + "learning_rate": 0.0002994099633288513, + "loss": 2.5661, + "step": 2206 + }, + { + "epoch": 1.4247901872175597, + "grad_norm": 1.1800225509359195, + "learning_rate": 0.000299404224717657, + "loss": 2.3092, + "step": 2207 + }, + { + "epoch": 1.4254357650096836, + "grad_norm": 1.1201929977908585, + "learning_rate": 0.00029939845839042787, + "loss": 2.2944, + "step": 2208 + }, + { + "epoch": 1.4260813428018075, + "grad_norm": 1.2256362123196218, + "learning_rate": 0.0002993926643482335, + "loss": 2.2782, + "step": 2209 + }, + { + "epoch": 1.4267269205939317, + "grad_norm": 1.2709861124544886, + "learning_rate": 0.0002993868425921489, + "loss": 2.1641, + "step": 2210 + }, + { + "epoch": 1.4273724983860556, + "grad_norm": 1.1241796659967234, + "learning_rate": 0.00029938099312325396, + "loss": 1.98, + "step": 2211 + }, + { + "epoch": 1.4280180761781796, + "grad_norm": 1.1864214179359203, + "learning_rate": 0.00029937511594263386, + "loss": 2.3347, + "step": 2212 + }, + { + "epoch": 1.4286636539703035, + "grad_norm": 1.2410554321893865, + "learning_rate": 0.00029936921105137886, + "loss": 2.3973, + "step": 2213 + }, + { + "epoch": 1.4293092317624274, + "grad_norm": 1.2853715947048274, + "learning_rate": 0.0002993632784505844, + "loss": 2.644, + "step": 2214 + }, + { + "epoch": 1.4299548095545513, + "grad_norm": 1.2325428913013854, + "learning_rate": 0.0002993573181413511, + "loss": 2.5702, + "step": 2215 + }, + { + "epoch": 1.4306003873466753, + "grad_norm": 1.1636288640658836, + "learning_rate": 0.00029935133012478455, + "loss": 2.5556, + "step": 2216 + }, + { + "epoch": 1.4312459651387992, + "grad_norm": 1.1977805740741414, + "learning_rate": 0.0002993453144019956, + "loss": 2.5636, + "step": 2217 + }, + { + "epoch": 1.4318915429309231, + "grad_norm": 1.1450509785620653, + "learning_rate": 0.0002993392709741003, + "loss": 2.0567, + "step": 2218 + }, + { + "epoch": 1.432537120723047, + "grad_norm": 1.280895384813178, + "learning_rate": 0.0002993331998422197, + "loss": 2.2708, + "step": 2219 + }, + { + "epoch": 1.433182698515171, + "grad_norm": 1.129291421777449, + "learning_rate": 0.00029932710100748016, + "loss": 2.0996, + "step": 2220 + }, + { + "epoch": 1.433828276307295, + "grad_norm": 1.2144625545362087, + "learning_rate": 0.00029932097447101294, + "loss": 2.5722, + "step": 2221 + }, + { + "epoch": 1.4344738540994189, + "grad_norm": 1.2356458710624747, + "learning_rate": 0.0002993148202339546, + "loss": 2.3935, + "step": 2222 + }, + { + "epoch": 1.435119431891543, + "grad_norm": 1.2384729525205709, + "learning_rate": 0.00029930863829744694, + "loss": 2.4401, + "step": 2223 + }, + { + "epoch": 1.435765009683667, + "grad_norm": 1.1100715736318396, + "learning_rate": 0.0002993024286626366, + "loss": 2.33, + "step": 2224 + }, + { + "epoch": 1.4364105874757909, + "grad_norm": 1.0124183745595925, + "learning_rate": 0.00029929619133067575, + "loss": 1.8345, + "step": 2225 + }, + { + "epoch": 1.4370561652679148, + "grad_norm": 1.2461844870481058, + "learning_rate": 0.00029928992630272127, + "loss": 2.6892, + "step": 2226 + }, + { + "epoch": 1.4377017430600387, + "grad_norm": 1.2663880059983381, + "learning_rate": 0.0002992836335799355, + "loss": 2.4219, + "step": 2227 + }, + { + "epoch": 1.4383473208521627, + "grad_norm": 1.073973841634916, + "learning_rate": 0.00029927731316348573, + "loss": 2.1914, + "step": 2228 + }, + { + "epoch": 1.4389928986442866, + "grad_norm": 1.1412268980227673, + "learning_rate": 0.00029927096505454455, + "loss": 2.4508, + "step": 
2229 + }, + { + "epoch": 1.4396384764364105, + "grad_norm": 1.1964768899903198, + "learning_rate": 0.00029926458925428955, + "loss": 2.3652, + "step": 2230 + }, + { + "epoch": 1.4402840542285347, + "grad_norm": 1.1315554810388604, + "learning_rate": 0.0002992581857639035, + "loss": 2.386, + "step": 2231 + }, + { + "epoch": 1.4409296320206586, + "grad_norm": 1.1272060340197647, + "learning_rate": 0.00029925175458457435, + "loss": 2.4388, + "step": 2232 + }, + { + "epoch": 1.4415752098127825, + "grad_norm": 1.2368328766169043, + "learning_rate": 0.0002992452957174951, + "loss": 2.4864, + "step": 2233 + }, + { + "epoch": 1.4422207876049065, + "grad_norm": 1.3257286405549316, + "learning_rate": 0.00029923880916386396, + "loss": 2.6844, + "step": 2234 + }, + { + "epoch": 1.4428663653970304, + "grad_norm": 1.18078782198041, + "learning_rate": 0.0002992322949248843, + "loss": 2.2761, + "step": 2235 + }, + { + "epoch": 1.4435119431891543, + "grad_norm": 1.2202228109377262, + "learning_rate": 0.0002992257530017645, + "loss": 2.4289, + "step": 2236 + }, + { + "epoch": 1.4441575209812783, + "grad_norm": 1.0756610432956588, + "learning_rate": 0.0002992191833957182, + "loss": 2.3619, + "step": 2237 + }, + { + "epoch": 1.4448030987734022, + "grad_norm": 1.065786099946732, + "learning_rate": 0.0002992125861079641, + "loss": 2.2404, + "step": 2238 + }, + { + "epoch": 1.4454486765655261, + "grad_norm": 1.193066091797322, + "learning_rate": 0.00029920596113972616, + "loss": 2.384, + "step": 2239 + }, + { + "epoch": 1.44609425435765, + "grad_norm": 1.1969001323088606, + "learning_rate": 0.00029919930849223327, + "loss": 2.3377, + "step": 2240 + }, + { + "epoch": 1.446739832149774, + "grad_norm": 1.1621637219102077, + "learning_rate": 0.0002991926281667196, + "loss": 2.4589, + "step": 2241 + }, + { + "epoch": 1.447385409941898, + "grad_norm": 1.1894097301893571, + "learning_rate": 0.0002991859201644244, + "loss": 2.4609, + "step": 2242 + }, + { + "epoch": 1.4480309877340218, + "grad_norm": 1.1152570397906167, + "learning_rate": 0.00029917918448659213, + "loss": 2.3242, + "step": 2243 + }, + { + "epoch": 1.4486765655261458, + "grad_norm": 1.1411569532588868, + "learning_rate": 0.0002991724211344723, + "loss": 2.019, + "step": 2244 + }, + { + "epoch": 1.44932214331827, + "grad_norm": 1.4573064457986291, + "learning_rate": 0.00029916563010931954, + "loss": 2.7351, + "step": 2245 + }, + { + "epoch": 1.4499677211103938, + "grad_norm": 1.345395171033234, + "learning_rate": 0.0002991588114123937, + "loss": 2.4793, + "step": 2246 + }, + { + "epoch": 1.4506132989025178, + "grad_norm": 1.1854814235642686, + "learning_rate": 0.00029915196504495974, + "loss": 2.4375, + "step": 2247 + }, + { + "epoch": 1.4512588766946417, + "grad_norm": 1.2559627568828664, + "learning_rate": 0.00029914509100828767, + "loss": 1.9598, + "step": 2248 + }, + { + "epoch": 1.4519044544867656, + "grad_norm": 1.074344542632205, + "learning_rate": 0.0002991381893036528, + "loss": 2.2439, + "step": 2249 + }, + { + "epoch": 1.4525500322788896, + "grad_norm": 1.275196767382471, + "learning_rate": 0.0002991312599323353, + "loss": 2.5189, + "step": 2250 + }, + { + "epoch": 1.4531956100710135, + "grad_norm": 1.0046758144593937, + "learning_rate": 0.0002991243028956208, + "loss": 2.2968, + "step": 2251 + }, + { + "epoch": 1.4538411878631374, + "grad_norm": 1.002148865396311, + "learning_rate": 0.00029911731819479983, + "loss": 2.1214, + "step": 2252 + }, + { + "epoch": 1.4544867656552616, + "grad_norm": 1.1132171063270666, + "learning_rate": 
0.0002991103058311681, + "loss": 2.3121, + "step": 2253 + }, + { + "epoch": 1.4551323434473855, + "grad_norm": 1.017053263972578, + "learning_rate": 0.0002991032658060266, + "loss": 2.04, + "step": 2254 + }, + { + "epoch": 1.4557779212395094, + "grad_norm": 1.21829031437535, + "learning_rate": 0.00029909619812068115, + "loss": 2.6457, + "step": 2255 + }, + { + "epoch": 1.4564234990316334, + "grad_norm": 1.1192624449413722, + "learning_rate": 0.000299089102776443, + "loss": 2.4547, + "step": 2256 + }, + { + "epoch": 1.4570690768237573, + "grad_norm": 1.078387563623124, + "learning_rate": 0.0002990819797746284, + "loss": 2.3125, + "step": 2257 + }, + { + "epoch": 1.4577146546158812, + "grad_norm": 1.037380838402293, + "learning_rate": 0.0002990748291165587, + "loss": 2.1564, + "step": 2258 + }, + { + "epoch": 1.4583602324080052, + "grad_norm": 1.1678309233094761, + "learning_rate": 0.00029906765080356043, + "loss": 2.3452, + "step": 2259 + }, + { + "epoch": 1.459005810200129, + "grad_norm": 1.134918108603516, + "learning_rate": 0.00029906044483696525, + "loss": 2.2489, + "step": 2260 + }, + { + "epoch": 1.459651387992253, + "grad_norm": 1.1095040908426297, + "learning_rate": 0.00029905321121811, + "loss": 2.3572, + "step": 2261 + }, + { + "epoch": 1.460296965784377, + "grad_norm": 1.0662344930332042, + "learning_rate": 0.0002990459499483364, + "loss": 1.9234, + "step": 2262 + }, + { + "epoch": 1.4609425435765009, + "grad_norm": 1.1546332420539733, + "learning_rate": 0.00029903866102899175, + "loss": 2.612, + "step": 2263 + }, + { + "epoch": 1.4615881213686248, + "grad_norm": 1.1073896560117205, + "learning_rate": 0.0002990313444614281, + "loss": 2.237, + "step": 2264 + }, + { + "epoch": 1.4622336991607487, + "grad_norm": 1.154670932361595, + "learning_rate": 0.00029902400024700267, + "loss": 2.3144, + "step": 2265 + }, + { + "epoch": 1.462879276952873, + "grad_norm": 1.124938543391949, + "learning_rate": 0.000299016628387078, + "loss": 2.4005, + "step": 2266 + }, + { + "epoch": 1.4635248547449968, + "grad_norm": 1.1486622393387773, + "learning_rate": 0.00029900922888302157, + "loss": 2.3731, + "step": 2267 + }, + { + "epoch": 1.4641704325371208, + "grad_norm": 1.0957687083576468, + "learning_rate": 0.00029900180173620614, + "loss": 2.232, + "step": 2268 + }, + { + "epoch": 1.4648160103292447, + "grad_norm": 1.2666983058611392, + "learning_rate": 0.0002989943469480095, + "loss": 2.5041, + "step": 2269 + }, + { + "epoch": 1.4654615881213686, + "grad_norm": 1.1301803503000165, + "learning_rate": 0.00029898686451981457, + "loss": 2.2543, + "step": 2270 + }, + { + "epoch": 1.4661071659134925, + "grad_norm": 1.1556280895511928, + "learning_rate": 0.0002989793544530094, + "loss": 2.3924, + "step": 2271 + }, + { + "epoch": 1.4667527437056165, + "grad_norm": 1.1721525609320802, + "learning_rate": 0.00029897181674898727, + "loss": 2.6801, + "step": 2272 + }, + { + "epoch": 1.4673983214977404, + "grad_norm": 1.059016032453546, + "learning_rate": 0.00029896425140914636, + "loss": 2.4859, + "step": 2273 + }, + { + "epoch": 1.4680438992898646, + "grad_norm": 1.1742348221058772, + "learning_rate": 0.0002989566584348903, + "loss": 2.3417, + "step": 2274 + }, + { + "epoch": 1.4686894770819885, + "grad_norm": 1.1836660225380797, + "learning_rate": 0.0002989490378276275, + "loss": 2.6279, + "step": 2275 + }, + { + "epoch": 1.4693350548741124, + "grad_norm": 1.3534559220149571, + "learning_rate": 0.0002989413895887717, + "loss": 2.3867, + "step": 2276 + }, + { + "epoch": 1.4699806326662364, + "grad_norm": 
1.1023566966032727, + "learning_rate": 0.00029893371371974187, + "loss": 2.3079, + "step": 2277 + }, + { + "epoch": 1.4706262104583603, + "grad_norm": 1.203369135374593, + "learning_rate": 0.00029892601022196176, + "loss": 2.4191, + "step": 2278 + }, + { + "epoch": 1.4712717882504842, + "grad_norm": 1.0878534795018346, + "learning_rate": 0.00029891827909686055, + "loss": 2.4387, + "step": 2279 + }, + { + "epoch": 1.4719173660426081, + "grad_norm": 1.3013642443803368, + "learning_rate": 0.0002989105203458725, + "loss": 2.6152, + "step": 2280 + }, + { + "epoch": 1.472562943834732, + "grad_norm": 1.0880606408166185, + "learning_rate": 0.0002989027339704368, + "loss": 1.9333, + "step": 2281 + }, + { + "epoch": 1.473208521626856, + "grad_norm": 1.1236782623366683, + "learning_rate": 0.00029889491997199796, + "loss": 2.2343, + "step": 2282 + }, + { + "epoch": 1.47385409941898, + "grad_norm": 1.1523636165810298, + "learning_rate": 0.0002988870783520056, + "loss": 2.345, + "step": 2283 + }, + { + "epoch": 1.4744996772111039, + "grad_norm": 1.1447227232598045, + "learning_rate": 0.00029887920911191447, + "loss": 2.2691, + "step": 2284 + }, + { + "epoch": 1.4751452550032278, + "grad_norm": 1.1806855114113506, + "learning_rate": 0.0002988713122531842, + "loss": 2.2596, + "step": 2285 + }, + { + "epoch": 1.4757908327953517, + "grad_norm": 1.1571441867386418, + "learning_rate": 0.00029886338777728, + "loss": 2.3495, + "step": 2286 + }, + { + "epoch": 1.4764364105874757, + "grad_norm": 1.0548461517053642, + "learning_rate": 0.00029885543568567173, + "loss": 2.1208, + "step": 2287 + }, + { + "epoch": 1.4770819883795998, + "grad_norm": 1.1150479775585458, + "learning_rate": 0.00029884745597983465, + "loss": 2.1161, + "step": 2288 + }, + { + "epoch": 1.4777275661717237, + "grad_norm": 1.187359538328765, + "learning_rate": 0.00029883944866124907, + "loss": 2.5212, + "step": 2289 + }, + { + "epoch": 1.4783731439638477, + "grad_norm": 1.1305700440664368, + "learning_rate": 0.0002988314137314005, + "loss": 2.3662, + "step": 2290 + }, + { + "epoch": 1.4790187217559716, + "grad_norm": 1.2993464186491666, + "learning_rate": 0.00029882335119177936, + "loss": 2.4386, + "step": 2291 + }, + { + "epoch": 1.4796642995480955, + "grad_norm": 1.0820841241960142, + "learning_rate": 0.0002988152610438815, + "loss": 2.2334, + "step": 2292 + }, + { + "epoch": 1.4803098773402195, + "grad_norm": 1.1371191886694159, + "learning_rate": 0.00029880714328920765, + "loss": 2.5058, + "step": 2293 + }, + { + "epoch": 1.4809554551323434, + "grad_norm": 1.154294719365429, + "learning_rate": 0.00029879899792926377, + "loss": 2.2849, + "step": 2294 + }, + { + "epoch": 1.4816010329244673, + "grad_norm": 1.2006881168939594, + "learning_rate": 0.0002987908249655608, + "loss": 2.3695, + "step": 2295 + }, + { + "epoch": 1.4822466107165915, + "grad_norm": 1.047175149563789, + "learning_rate": 0.00029878262439961506, + "loss": 2.1759, + "step": 2296 + }, + { + "epoch": 1.4828921885087154, + "grad_norm": 1.161426538909281, + "learning_rate": 0.00029877439623294773, + "loss": 2.3766, + "step": 2297 + }, + { + "epoch": 1.4835377663008393, + "grad_norm": 1.1891828111273708, + "learning_rate": 0.0002987661404670853, + "loss": 2.3457, + "step": 2298 + }, + { + "epoch": 1.4841833440929633, + "grad_norm": 1.1325781387976805, + "learning_rate": 0.0002987578571035592, + "loss": 2.2801, + "step": 2299 + }, + { + "epoch": 1.4848289218850872, + "grad_norm": 1.1885002953421342, + "learning_rate": 0.00029874954614390615, + "loss": 2.4617, + "step": 2300 + }, + 
{ + "epoch": 1.4854744996772111, + "grad_norm": 1.220968529190303, + "learning_rate": 0.00029874120758966794, + "loss": 2.4581, + "step": 2301 + }, + { + "epoch": 1.486120077469335, + "grad_norm": 1.1639228861661643, + "learning_rate": 0.0002987328414423914, + "loss": 2.6539, + "step": 2302 + }, + { + "epoch": 1.486765655261459, + "grad_norm": 1.2272772129910219, + "learning_rate": 0.00029872444770362856, + "loss": 2.4781, + "step": 2303 + }, + { + "epoch": 1.487411233053583, + "grad_norm": 1.0638639411850355, + "learning_rate": 0.0002987160263749366, + "loss": 2.3444, + "step": 2304 + }, + { + "epoch": 1.4880568108457068, + "grad_norm": 1.0426415580831097, + "learning_rate": 0.0002987075774578777, + "loss": 2.1567, + "step": 2305 + }, + { + "epoch": 1.4887023886378308, + "grad_norm": 1.150782114571974, + "learning_rate": 0.0002986991009540192, + "loss": 2.3275, + "step": 2306 + }, + { + "epoch": 1.4893479664299547, + "grad_norm": 1.1589044439878775, + "learning_rate": 0.00029869059686493367, + "loss": 1.949, + "step": 2307 + }, + { + "epoch": 1.4899935442220786, + "grad_norm": 1.2204047653923518, + "learning_rate": 0.0002986820651921986, + "loss": 2.6215, + "step": 2308 + }, + { + "epoch": 1.4906391220142028, + "grad_norm": 1.0734866452852212, + "learning_rate": 0.00029867350593739675, + "loss": 2.4438, + "step": 2309 + }, + { + "epoch": 1.4912846998063267, + "grad_norm": 1.1060975608827015, + "learning_rate": 0.00029866491910211595, + "loss": 2.2131, + "step": 2310 + }, + { + "epoch": 1.4919302775984506, + "grad_norm": 1.43228389428392, + "learning_rate": 0.00029865630468794925, + "loss": 2.4617, + "step": 2311 + }, + { + "epoch": 1.4925758553905746, + "grad_norm": 1.1913094547740826, + "learning_rate": 0.0002986476626964945, + "loss": 2.3987, + "step": 2312 + }, + { + "epoch": 1.4932214331826985, + "grad_norm": 1.1707465354305784, + "learning_rate": 0.00029863899312935506, + "loss": 2.2907, + "step": 2313 + }, + { + "epoch": 1.4938670109748224, + "grad_norm": 1.2566988678399704, + "learning_rate": 0.00029863029598813914, + "loss": 2.4735, + "step": 2314 + }, + { + "epoch": 1.4945125887669464, + "grad_norm": 1.0081138893519244, + "learning_rate": 0.0002986215712744602, + "loss": 1.9094, + "step": 2315 + }, + { + "epoch": 1.4951581665590703, + "grad_norm": 1.1586382164910847, + "learning_rate": 0.00029861281898993665, + "loss": 2.4979, + "step": 2316 + }, + { + "epoch": 1.4958037443511945, + "grad_norm": 1.0810458750340275, + "learning_rate": 0.0002986040391361923, + "loss": 2.3571, + "step": 2317 + }, + { + "epoch": 1.4964493221433184, + "grad_norm": 1.07206782537489, + "learning_rate": 0.0002985952317148558, + "loss": 2.4648, + "step": 2318 + }, + { + "epoch": 1.4970948999354423, + "grad_norm": 1.2601595293225787, + "learning_rate": 0.00029858639672756106, + "loss": 2.351, + "step": 2319 + }, + { + "epoch": 1.4977404777275662, + "grad_norm": 1.2495007857991263, + "learning_rate": 0.000298577534175947, + "loss": 2.4167, + "step": 2320 + }, + { + "epoch": 1.4983860555196902, + "grad_norm": 1.1331462314798377, + "learning_rate": 0.00029856864406165777, + "loss": 2.4703, + "step": 2321 + }, + { + "epoch": 1.499031633311814, + "grad_norm": 1.1610605967606036, + "learning_rate": 0.0002985597263863425, + "loss": 2.572, + "step": 2322 + }, + { + "epoch": 1.499677211103938, + "grad_norm": 1.14246129118964, + "learning_rate": 0.0002985507811516557, + "loss": 2.1735, + "step": 2323 + }, + { + "epoch": 1.500322788896062, + "grad_norm": 1.1614263913214964, + "learning_rate": 0.0002985418083592566, + 
"loss": 2.3227, + "step": 2324 + }, + { + "epoch": 1.500968366688186, + "grad_norm": 1.0322901605290367, + "learning_rate": 0.0002985328080108098, + "loss": 1.9087, + "step": 2325 + }, + { + "epoch": 1.5016139444803098, + "grad_norm": 1.0705223611526624, + "learning_rate": 0.000298523780107985, + "loss": 2.3053, + "step": 2326 + }, + { + "epoch": 1.5022595222724338, + "grad_norm": 1.204568042178985, + "learning_rate": 0.000298514724652457, + "loss": 2.4152, + "step": 2327 + }, + { + "epoch": 1.5029051000645577, + "grad_norm": 1.0562397439322329, + "learning_rate": 0.0002985056416459056, + "loss": 2.2556, + "step": 2328 + }, + { + "epoch": 1.5035506778566816, + "grad_norm": 1.1125565233585646, + "learning_rate": 0.00029849653109001586, + "loss": 2.5828, + "step": 2329 + }, + { + "epoch": 1.5041962556488055, + "grad_norm": 1.2981326185477946, + "learning_rate": 0.00029848739298647784, + "loss": 2.1841, + "step": 2330 + }, + { + "epoch": 1.5048418334409295, + "grad_norm": 1.1512404027287897, + "learning_rate": 0.0002984782273369867, + "loss": 2.4683, + "step": 2331 + }, + { + "epoch": 1.5054874112330536, + "grad_norm": 1.0746773150038889, + "learning_rate": 0.00029846903414324286, + "loss": 2.4786, + "step": 2332 + }, + { + "epoch": 1.5061329890251776, + "grad_norm": 1.1715165015219726, + "learning_rate": 0.0002984598134069517, + "loss": 2.5153, + "step": 2333 + }, + { + "epoch": 1.5067785668173015, + "grad_norm": 1.0828376929119332, + "learning_rate": 0.0002984505651298238, + "loss": 2.0323, + "step": 2334 + }, + { + "epoch": 1.5074241446094254, + "grad_norm": 1.1399560285980188, + "learning_rate": 0.00029844128931357487, + "loss": 2.301, + "step": 2335 + }, + { + "epoch": 1.5080697224015494, + "grad_norm": 1.166908597232531, + "learning_rate": 0.00029843198595992545, + "loss": 2.3441, + "step": 2336 + }, + { + "epoch": 1.5087153001936735, + "grad_norm": 1.137290856175705, + "learning_rate": 0.00029842265507060167, + "loss": 2.4036, + "step": 2337 + }, + { + "epoch": 1.5093608779857974, + "grad_norm": 1.0804531927236751, + "learning_rate": 0.0002984132966473343, + "loss": 1.8618, + "step": 2338 + }, + { + "epoch": 1.5100064557779214, + "grad_norm": 1.1988159013290056, + "learning_rate": 0.0002984039106918596, + "loss": 2.3691, + "step": 2339 + }, + { + "epoch": 1.5106520335700453, + "grad_norm": 1.1558166935195755, + "learning_rate": 0.00029839449720591853, + "loss": 2.3381, + "step": 2340 + }, + { + "epoch": 1.5112976113621692, + "grad_norm": 1.1844846880086037, + "learning_rate": 0.0002983850561912576, + "loss": 2.4144, + "step": 2341 + }, + { + "epoch": 1.5119431891542932, + "grad_norm": 1.075949189470953, + "learning_rate": 0.0002983755876496282, + "loss": 2.4503, + "step": 2342 + }, + { + "epoch": 1.512588766946417, + "grad_norm": 1.172647996449451, + "learning_rate": 0.00029836609158278676, + "loss": 2.4952, + "step": 2343 + }, + { + "epoch": 1.513234344738541, + "grad_norm": 1.219611544368217, + "learning_rate": 0.00029835656799249494, + "loss": 2.4548, + "step": 2344 + }, + { + "epoch": 1.513879922530665, + "grad_norm": 1.180861034305046, + "learning_rate": 0.00029834701688051947, + "loss": 2.078, + "step": 2345 + }, + { + "epoch": 1.5145255003227889, + "grad_norm": 1.0826515425723142, + "learning_rate": 0.0002983374382486321, + "loss": 2.3477, + "step": 2346 + }, + { + "epoch": 1.5151710781149128, + "grad_norm": 1.1280249055652711, + "learning_rate": 0.0002983278320986099, + "loss": 2.0573, + "step": 2347 + }, + { + "epoch": 1.5158166559070367, + "grad_norm": 1.1112774519702864, + 
"learning_rate": 0.0002983181984322348, + "loss": 2.3213, + "step": 2348 + }, + { + "epoch": 1.5164622336991607, + "grad_norm": 1.0682736323407362, + "learning_rate": 0.000298308537251294, + "loss": 2.3648, + "step": 2349 + }, + { + "epoch": 1.5171078114912846, + "grad_norm": 1.0283174051468689, + "learning_rate": 0.00029829884855757973, + "loss": 1.8867, + "step": 2350 + }, + { + "epoch": 1.5177533892834085, + "grad_norm": 1.2713070048545192, + "learning_rate": 0.0002982891323528894, + "loss": 2.4014, + "step": 2351 + }, + { + "epoch": 1.5183989670755325, + "grad_norm": 1.265167001278272, + "learning_rate": 0.0002982793886390254, + "loss": 2.6143, + "step": 2352 + }, + { + "epoch": 1.5190445448676564, + "grad_norm": 1.0655600086573618, + "learning_rate": 0.00029826961741779526, + "loss": 2.3697, + "step": 2353 + }, + { + "epoch": 1.5196901226597805, + "grad_norm": 1.2033909621023668, + "learning_rate": 0.00029825981869101173, + "loss": 2.5961, + "step": 2354 + }, + { + "epoch": 1.5203357004519045, + "grad_norm": 1.2399033073793548, + "learning_rate": 0.0002982499924604925, + "loss": 2.4235, + "step": 2355 + }, + { + "epoch": 1.5209812782440284, + "grad_norm": 1.1034646699475763, + "learning_rate": 0.00029824013872806047, + "loss": 2.2868, + "step": 2356 + }, + { + "epoch": 1.5216268560361523, + "grad_norm": 1.2071694163450226, + "learning_rate": 0.0002982302574955437, + "loss": 2.0027, + "step": 2357 + }, + { + "epoch": 1.5222724338282763, + "grad_norm": 1.1003450920751954, + "learning_rate": 0.0002982203487647751, + "loss": 2.1877, + "step": 2358 + }, + { + "epoch": 1.5229180116204004, + "grad_norm": 1.3255763772935487, + "learning_rate": 0.00029821041253759293, + "loss": 2.3739, + "step": 2359 + }, + { + "epoch": 1.5235635894125243, + "grad_norm": 1.3745142222466047, + "learning_rate": 0.00029820044881584043, + "loss": 2.462, + "step": 2360 + }, + { + "epoch": 1.5242091672046483, + "grad_norm": 1.0967317229840592, + "learning_rate": 0.000298190457601366, + "loss": 2.2736, + "step": 2361 + }, + { + "epoch": 1.5248547449967722, + "grad_norm": 1.3136903594222717, + "learning_rate": 0.00029818043889602314, + "loss": 2.0093, + "step": 2362 + }, + { + "epoch": 1.5255003227888961, + "grad_norm": 1.3273793105981893, + "learning_rate": 0.00029817039270167034, + "loss": 2.4331, + "step": 2363 + }, + { + "epoch": 1.52614590058102, + "grad_norm": 1.1792559940269847, + "learning_rate": 0.0002981603190201714, + "loss": 2.4819, + "step": 2364 + }, + { + "epoch": 1.526791478373144, + "grad_norm": 1.084591410071819, + "learning_rate": 0.00029815021785339495, + "loss": 2.464, + "step": 2365 + }, + { + "epoch": 1.527437056165268, + "grad_norm": 1.096469598097682, + "learning_rate": 0.000298140089203215, + "loss": 2.3844, + "step": 2366 + }, + { + "epoch": 1.5280826339573919, + "grad_norm": 1.307855602939095, + "learning_rate": 0.00029812993307151036, + "loss": 2.4814, + "step": 2367 + }, + { + "epoch": 1.5287282117495158, + "grad_norm": 1.072902057854611, + "learning_rate": 0.0002981197494601652, + "loss": 2.3342, + "step": 2368 + }, + { + "epoch": 1.5293737895416397, + "grad_norm": 1.155733121571183, + "learning_rate": 0.00029810953837106877, + "loss": 2.2094, + "step": 2369 + }, + { + "epoch": 1.5300193673337636, + "grad_norm": 1.0375272845403638, + "learning_rate": 0.0002980992998061152, + "loss": 2.3545, + "step": 2370 + }, + { + "epoch": 1.5306649451258876, + "grad_norm": 1.2263351530891307, + "learning_rate": 0.0002980890337672038, + "loss": 2.4623, + "step": 2371 + }, + { + "epoch": 
1.5313105229180115, + "grad_norm": 1.233867412398953, + "learning_rate": 0.0002980787402562392, + "loss": 2.5188, + "step": 2372 + }, + { + "epoch": 1.5319561007101354, + "grad_norm": 1.1183151489282754, + "learning_rate": 0.0002980684192751308, + "loss": 2.5458, + "step": 2373 + }, + { + "epoch": 1.5326016785022594, + "grad_norm": 1.1315839865959196, + "learning_rate": 0.0002980580708257934, + "loss": 2.3123, + "step": 2374 + }, + { + "epoch": 1.5332472562943835, + "grad_norm": 1.1364412688279444, + "learning_rate": 0.0002980476949101466, + "loss": 2.2294, + "step": 2375 + }, + { + "epoch": 1.5338928340865075, + "grad_norm": 1.2087094335985138, + "learning_rate": 0.0002980372915301153, + "loss": 2.595, + "step": 2376 + }, + { + "epoch": 1.5345384118786314, + "grad_norm": 1.138771151481328, + "learning_rate": 0.0002980268606876294, + "loss": 2.6, + "step": 2377 + }, + { + "epoch": 1.5351839896707553, + "grad_norm": 1.076320482907991, + "learning_rate": 0.000298016402384624, + "loss": 2.1611, + "step": 2378 + }, + { + "epoch": 1.5358295674628792, + "grad_norm": 1.04418984703873, + "learning_rate": 0.00029800591662303925, + "loss": 1.9814, + "step": 2379 + }, + { + "epoch": 1.5364751452550034, + "grad_norm": 1.2157536637837612, + "learning_rate": 0.00029799540340482025, + "loss": 2.1712, + "step": 2380 + }, + { + "epoch": 1.5371207230471273, + "grad_norm": 1.054785711713118, + "learning_rate": 0.0002979848627319173, + "loss": 2.3137, + "step": 2381 + }, + { + "epoch": 1.5377663008392513, + "grad_norm": 1.1388626694837094, + "learning_rate": 0.000297974294606286, + "loss": 2.3719, + "step": 2382 + }, + { + "epoch": 1.5384118786313752, + "grad_norm": 1.167715322524665, + "learning_rate": 0.00029796369902988665, + "loss": 2.4336, + "step": 2383 + }, + { + "epoch": 1.5390574564234991, + "grad_norm": 1.123997682186135, + "learning_rate": 0.0002979530760046849, + "loss": 2.256, + "step": 2384 + }, + { + "epoch": 1.539703034215623, + "grad_norm": 1.1150268459925374, + "learning_rate": 0.0002979424255326514, + "loss": 2.1861, + "step": 2385 + }, + { + "epoch": 1.540348612007747, + "grad_norm": 1.133889039402519, + "learning_rate": 0.000297931747615762, + "loss": 2.3426, + "step": 2386 + }, + { + "epoch": 1.540994189799871, + "grad_norm": 1.182643369230545, + "learning_rate": 0.00029792104225599757, + "loss": 2.2457, + "step": 2387 + }, + { + "epoch": 1.5416397675919948, + "grad_norm": 1.0600222496616536, + "learning_rate": 0.000297910309455344, + "loss": 2.2431, + "step": 2388 + }, + { + "epoch": 1.5422853453841188, + "grad_norm": 1.0525716707437394, + "learning_rate": 0.0002978995492157924, + "loss": 2.2722, + "step": 2389 + }, + { + "epoch": 1.5429309231762427, + "grad_norm": 1.1006193656426477, + "learning_rate": 0.00029788876153933876, + "loss": 2.3622, + "step": 2390 + }, + { + "epoch": 1.5435765009683666, + "grad_norm": 1.255514212357069, + "learning_rate": 0.0002978779464279845, + "loss": 2.4549, + "step": 2391 + }, + { + "epoch": 1.5442220787604906, + "grad_norm": 1.1518891232120152, + "learning_rate": 0.0002978671038837358, + "loss": 2.4, + "step": 2392 + }, + { + "epoch": 1.5448676565526145, + "grad_norm": 1.0986893771898705, + "learning_rate": 0.0002978562339086041, + "loss": 2.114, + "step": 2393 + }, + { + "epoch": 1.5455132343447384, + "grad_norm": 1.1471905964573006, + "learning_rate": 0.00029784533650460595, + "loss": 2.3179, + "step": 2394 + }, + { + "epoch": 1.5461588121368623, + "grad_norm": 1.0985265983512422, + "learning_rate": 0.0002978344116737628, + "loss": 2.0124, + "step": 
2395 + }, + { + "epoch": 1.5468043899289863, + "grad_norm": 1.1899951157983653, + "learning_rate": 0.00029782345941810145, + "loss": 2.3358, + "step": 2396 + }, + { + "epoch": 1.5474499677211104, + "grad_norm": 1.249188105565493, + "learning_rate": 0.00029781247973965366, + "loss": 2.5853, + "step": 2397 + }, + { + "epoch": 1.5480955455132344, + "grad_norm": 1.1528405080199353, + "learning_rate": 0.0002978014726404562, + "loss": 2.6342, + "step": 2398 + }, + { + "epoch": 1.5487411233053583, + "grad_norm": 1.1753541819920912, + "learning_rate": 0.00029779043812255097, + "loss": 2.4049, + "step": 2399 + }, + { + "epoch": 1.5493867010974822, + "grad_norm": 1.079859407639004, + "learning_rate": 0.0002977793761879851, + "loss": 2.2448, + "step": 2400 + }, + { + "epoch": 1.5500322788896062, + "grad_norm": 1.183177283561767, + "learning_rate": 0.00029776828683881064, + "loss": 2.443, + "step": 2401 + }, + { + "epoch": 1.5506778566817303, + "grad_norm": 1.2107541074405905, + "learning_rate": 0.00029775717007708474, + "loss": 2.4594, + "step": 2402 + }, + { + "epoch": 1.5513234344738542, + "grad_norm": 1.0687980792814211, + "learning_rate": 0.00029774602590486977, + "loss": 2.4865, + "step": 2403 + }, + { + "epoch": 1.5519690122659782, + "grad_norm": 1.1163866099538666, + "learning_rate": 0.000297734854324233, + "loss": 2.0961, + "step": 2404 + }, + { + "epoch": 1.552614590058102, + "grad_norm": 1.1055366759482554, + "learning_rate": 0.00029772365533724693, + "loss": 1.8749, + "step": 2405 + }, + { + "epoch": 1.553260167850226, + "grad_norm": 1.1773596135762125, + "learning_rate": 0.000297712428945989, + "loss": 2.549, + "step": 2406 + }, + { + "epoch": 1.55390574564235, + "grad_norm": 1.116674702034207, + "learning_rate": 0.0002977011751525419, + "loss": 2.4346, + "step": 2407 + }, + { + "epoch": 1.5545513234344739, + "grad_norm": 1.167686699231868, + "learning_rate": 0.0002976898939589934, + "loss": 2.5944, + "step": 2408 + }, + { + "epoch": 1.5551969012265978, + "grad_norm": 1.1584663667182622, + "learning_rate": 0.00029767858536743606, + "loss": 2.3461, + "step": 2409 + }, + { + "epoch": 1.5558424790187217, + "grad_norm": 1.134267567185316, + "learning_rate": 0.00029766724937996795, + "loss": 2.2614, + "step": 2410 + }, + { + "epoch": 1.5564880568108457, + "grad_norm": 1.1963753906163859, + "learning_rate": 0.00029765588599869197, + "loss": 2.3359, + "step": 2411 + }, + { + "epoch": 1.5571336346029696, + "grad_norm": 1.1389083396669029, + "learning_rate": 0.000297644495225716, + "loss": 1.813, + "step": 2412 + }, + { + "epoch": 1.5577792123950935, + "grad_norm": 1.0605347346393101, + "learning_rate": 0.0002976330770631534, + "loss": 1.9832, + "step": 2413 + }, + { + "epoch": 1.5584247901872175, + "grad_norm": 1.1345066975802205, + "learning_rate": 0.0002976216315131221, + "loss": 2.4113, + "step": 2414 + }, + { + "epoch": 1.5590703679793414, + "grad_norm": 1.2057975199474822, + "learning_rate": 0.0002976101585777455, + "loss": 2.2486, + "step": 2415 + }, + { + "epoch": 1.5597159457714653, + "grad_norm": 1.2706221193338931, + "learning_rate": 0.000297598658259152, + "loss": 2.5905, + "step": 2416 + }, + { + "epoch": 1.5603615235635893, + "grad_norm": 1.2207530054761648, + "learning_rate": 0.00029758713055947494, + "loss": 2.3982, + "step": 2417 + }, + { + "epoch": 1.5610071013557134, + "grad_norm": 1.3130496754201928, + "learning_rate": 0.00029757557548085276, + "loss": 2.3276, + "step": 2418 + }, + { + "epoch": 1.5616526791478373, + "grad_norm": 1.11422028571568, + "learning_rate": 
0.00029756399302542923, + "loss": 2.4123, + "step": 2419 + }, + { + "epoch": 1.5622982569399613, + "grad_norm": 1.1766712526303933, + "learning_rate": 0.00029755238319535286, + "loss": 2.638, + "step": 2420 + }, + { + "epoch": 1.5629438347320852, + "grad_norm": 1.1862454519132228, + "learning_rate": 0.0002975407459927775, + "loss": 2.4667, + "step": 2421 + }, + { + "epoch": 1.5635894125242091, + "grad_norm": 1.1089995421361285, + "learning_rate": 0.0002975290814198619, + "loss": 2.3303, + "step": 2422 + }, + { + "epoch": 1.564234990316333, + "grad_norm": 1.1245471855211113, + "learning_rate": 0.00029751738947877, + "loss": 1.7846, + "step": 2423 + }, + { + "epoch": 1.5648805681084572, + "grad_norm": 1.2319466320988246, + "learning_rate": 0.00029750567017167075, + "loss": 2.3765, + "step": 2424 + }, + { + "epoch": 1.5655261459005811, + "grad_norm": 1.2559209489706182, + "learning_rate": 0.00029749392350073825, + "loss": 2.4259, + "step": 2425 + }, + { + "epoch": 1.566171723692705, + "grad_norm": 1.0963774301173892, + "learning_rate": 0.00029748214946815157, + "loss": 2.2232, + "step": 2426 + }, + { + "epoch": 1.566817301484829, + "grad_norm": 1.0544655506233478, + "learning_rate": 0.00029747034807609495, + "loss": 2.4188, + "step": 2427 + }, + { + "epoch": 1.567462879276953, + "grad_norm": 1.0911693864689536, + "learning_rate": 0.00029745851932675766, + "loss": 2.224, + "step": 2428 + }, + { + "epoch": 1.5681084570690769, + "grad_norm": 1.1622303080047613, + "learning_rate": 0.000297446663222334, + "loss": 2.5122, + "step": 2429 + }, + { + "epoch": 1.5687540348612008, + "grad_norm": 1.0200653551788366, + "learning_rate": 0.00029743477976502356, + "loss": 1.9532, + "step": 2430 + }, + { + "epoch": 1.5693996126533247, + "grad_norm": 1.2471751871233856, + "learning_rate": 0.00029742286895703067, + "loss": 2.519, + "step": 2431 + }, + { + "epoch": 1.5700451904454487, + "grad_norm": 1.1198301450960464, + "learning_rate": 0.000297410930800565, + "loss": 2.1429, + "step": 2432 + }, + { + "epoch": 1.5706907682375726, + "grad_norm": 1.1949754870879399, + "learning_rate": 0.00029739896529784116, + "loss": 2.2115, + "step": 2433 + }, + { + "epoch": 1.5713363460296965, + "grad_norm": 1.0875393622625813, + "learning_rate": 0.000297386972451079, + "loss": 2.1986, + "step": 2434 + }, + { + "epoch": 1.5719819238218204, + "grad_norm": 1.1542386901927293, + "learning_rate": 0.00029737495226250305, + "loss": 2.3004, + "step": 2435 + }, + { + "epoch": 1.5726275016139444, + "grad_norm": 1.2198693311051851, + "learning_rate": 0.00029736290473434345, + "loss": 2.4717, + "step": 2436 + }, + { + "epoch": 1.5732730794060683, + "grad_norm": 1.386839097054103, + "learning_rate": 0.00029735082986883505, + "loss": 2.3792, + "step": 2437 + }, + { + "epoch": 1.5739186571981922, + "grad_norm": 1.1503638690359685, + "learning_rate": 0.0002973387276682178, + "loss": 2.4207, + "step": 2438 + }, + { + "epoch": 1.5745642349903162, + "grad_norm": 1.1270893478766129, + "learning_rate": 0.00029732659813473683, + "loss": 2.2153, + "step": 2439 + }, + { + "epoch": 1.5752098127824403, + "grad_norm": 1.141882642302233, + "learning_rate": 0.0002973144412706424, + "loss": 2.2097, + "step": 2440 + }, + { + "epoch": 1.5758553905745643, + "grad_norm": 1.3080005875168752, + "learning_rate": 0.0002973022570781895, + "loss": 2.3984, + "step": 2441 + }, + { + "epoch": 1.5765009683666882, + "grad_norm": 1.09491649641211, + "learning_rate": 0.00029729004555963866, + "loss": 2.2427, + "step": 2442 + }, + { + "epoch": 1.5771465461588121, + 
"grad_norm": 1.143929034314331, + "learning_rate": 0.00029727780671725504, + "loss": 2.3622, + "step": 2443 + }, + { + "epoch": 1.577792123950936, + "grad_norm": 1.3057864454172692, + "learning_rate": 0.00029726554055330926, + "loss": 2.5593, + "step": 2444 + }, + { + "epoch": 1.5784377017430602, + "grad_norm": 1.2053882369147364, + "learning_rate": 0.00029725324707007667, + "loss": 2.3071, + "step": 2445 + }, + { + "epoch": 1.5790832795351841, + "grad_norm": 1.3016182947435326, + "learning_rate": 0.00029724092626983795, + "loss": 2.5506, + "step": 2446 + }, + { + "epoch": 1.579728857327308, + "grad_norm": 1.1426679669042585, + "learning_rate": 0.00029722857815487865, + "loss": 2.214, + "step": 2447 + }, + { + "epoch": 1.580374435119432, + "grad_norm": 1.1649950861978031, + "learning_rate": 0.0002972162027274896, + "loss": 2.4357, + "step": 2448 + }, + { + "epoch": 1.581020012911556, + "grad_norm": 1.1550290976280964, + "learning_rate": 0.00029720379998996637, + "loss": 2.252, + "step": 2449 + }, + { + "epoch": 1.5816655907036798, + "grad_norm": 1.3793398986897714, + "learning_rate": 0.0002971913699446099, + "loss": 2.4994, + "step": 2450 + }, + { + "epoch": 1.5823111684958038, + "grad_norm": 1.1145876558702685, + "learning_rate": 0.00029717891259372617, + "loss": 2.2824, + "step": 2451 + }, + { + "epoch": 1.5829567462879277, + "grad_norm": 1.1207735253446138, + "learning_rate": 0.0002971664279396261, + "loss": 2.4354, + "step": 2452 + }, + { + "epoch": 1.5836023240800516, + "grad_norm": 1.120824805469874, + "learning_rate": 0.00029715391598462564, + "loss": 2.1589, + "step": 2453 + }, + { + "epoch": 1.5842479018721756, + "grad_norm": 1.2763194799282105, + "learning_rate": 0.000297141376731046, + "loss": 2.6104, + "step": 2454 + }, + { + "epoch": 1.5848934796642995, + "grad_norm": 1.1099511760293466, + "learning_rate": 0.00029712881018121326, + "loss": 2.4147, + "step": 2455 + }, + { + "epoch": 1.5855390574564234, + "grad_norm": 1.2601130763597754, + "learning_rate": 0.0002971162163374587, + "loss": 2.4546, + "step": 2456 + }, + { + "epoch": 1.5861846352485474, + "grad_norm": 1.0550331001512234, + "learning_rate": 0.00029710359520211855, + "loss": 2.3423, + "step": 2457 + }, + { + "epoch": 1.5868302130406713, + "grad_norm": 1.125855686312431, + "learning_rate": 0.0002970909467775342, + "loss": 2.4035, + "step": 2458 + }, + { + "epoch": 1.5874757908327952, + "grad_norm": 1.0961771429537046, + "learning_rate": 0.0002970782710660521, + "loss": 2.2406, + "step": 2459 + }, + { + "epoch": 1.5881213686249191, + "grad_norm": 1.06224848394204, + "learning_rate": 0.00029706556807002364, + "loss": 2.2988, + "step": 2460 + }, + { + "epoch": 1.5887669464170433, + "grad_norm": 1.2887322675711215, + "learning_rate": 0.00029705283779180545, + "loss": 2.5934, + "step": 2461 + }, + { + "epoch": 1.5894125242091672, + "grad_norm": 1.1850659761179825, + "learning_rate": 0.0002970400802337591, + "loss": 2.3281, + "step": 2462 + }, + { + "epoch": 1.5900581020012912, + "grad_norm": 1.0457649496843442, + "learning_rate": 0.00029702729539825116, + "loss": 2.3114, + "step": 2463 + }, + { + "epoch": 1.590703679793415, + "grad_norm": 1.147220424965352, + "learning_rate": 0.00029701448328765345, + "loss": 2.3636, + "step": 2464 + }, + { + "epoch": 1.591349257585539, + "grad_norm": 1.1253212650222166, + "learning_rate": 0.00029700164390434274, + "loss": 2.2487, + "step": 2465 + }, + { + "epoch": 1.591994835377663, + "grad_norm": 1.181830968861507, + "learning_rate": 0.00029698877725070085, + "loss": 2.386, + "step": 
2466 + }, + { + "epoch": 1.592640413169787, + "grad_norm": 1.1286260306857745, + "learning_rate": 0.00029697588332911466, + "loss": 2.2287, + "step": 2467 + }, + { + "epoch": 1.593285990961911, + "grad_norm": 1.0838485813304248, + "learning_rate": 0.0002969629621419761, + "loss": 2.3965, + "step": 2468 + }, + { + "epoch": 1.593931568754035, + "grad_norm": 1.2325754270470966, + "learning_rate": 0.00029695001369168237, + "loss": 2.3826, + "step": 2469 + }, + { + "epoch": 1.594577146546159, + "grad_norm": 1.4430090016681798, + "learning_rate": 0.00029693703798063534, + "loss": 2.415, + "step": 2470 + }, + { + "epoch": 1.5952227243382828, + "grad_norm": 1.1653525066901536, + "learning_rate": 0.0002969240350112421, + "loss": 2.394, + "step": 2471 + }, + { + "epoch": 1.5958683021304068, + "grad_norm": 1.2019005618788088, + "learning_rate": 0.00029691100478591506, + "loss": 1.9651, + "step": 2472 + }, + { + "epoch": 1.5965138799225307, + "grad_norm": 1.1107356030192246, + "learning_rate": 0.00029689794730707134, + "loss": 2.3321, + "step": 2473 + }, + { + "epoch": 1.5971594577146546, + "grad_norm": 1.1844492053684184, + "learning_rate": 0.0002968848625771332, + "loss": 2.3173, + "step": 2474 + }, + { + "epoch": 1.5978050355067785, + "grad_norm": 1.2047280504275193, + "learning_rate": 0.000296871750598528, + "loss": 2.3201, + "step": 2475 + }, + { + "epoch": 1.5984506132989025, + "grad_norm": 1.1081698814875882, + "learning_rate": 0.00029685861137368816, + "loss": 2.2985, + "step": 2476 + }, + { + "epoch": 1.5990961910910264, + "grad_norm": 1.0965369148286876, + "learning_rate": 0.00029684544490505123, + "loss": 2.4205, + "step": 2477 + }, + { + "epoch": 1.5997417688831503, + "grad_norm": 1.0931204058706363, + "learning_rate": 0.0002968322511950597, + "loss": 2.4274, + "step": 2478 + }, + { + "epoch": 1.6003873466752743, + "grad_norm": 1.1925800432219318, + "learning_rate": 0.00029681903024616097, + "loss": 2.4352, + "step": 2479 + }, + { + "epoch": 1.6010329244673982, + "grad_norm": 1.100979718465681, + "learning_rate": 0.00029680578206080785, + "loss": 2.3904, + "step": 2480 + }, + { + "epoch": 1.6016785022595221, + "grad_norm": 1.0664758755926513, + "learning_rate": 0.000296792506641458, + "loss": 2.0756, + "step": 2481 + }, + { + "epoch": 1.602324080051646, + "grad_norm": 1.0937217481419619, + "learning_rate": 0.00029677920399057405, + "loss": 1.9419, + "step": 2482 + }, + { + "epoch": 1.6029696578437702, + "grad_norm": 1.062880929274532, + "learning_rate": 0.00029676587411062383, + "loss": 2.2843, + "step": 2483 + }, + { + "epoch": 1.6036152356358941, + "grad_norm": 1.0453324434417115, + "learning_rate": 0.00029675251700408023, + "loss": 2.3531, + "step": 2484 + }, + { + "epoch": 1.604260813428018, + "grad_norm": 1.3532025594870476, + "learning_rate": 0.00029673913267342104, + "loss": 2.5028, + "step": 2485 + }, + { + "epoch": 1.604906391220142, + "grad_norm": 1.3185509698756497, + "learning_rate": 0.00029672572112112924, + "loss": 2.5061, + "step": 2486 + }, + { + "epoch": 1.605551969012266, + "grad_norm": 1.087955447897756, + "learning_rate": 0.00029671228234969275, + "loss": 2.3601, + "step": 2487 + }, + { + "epoch": 1.60619754680439, + "grad_norm": 1.16915167707285, + "learning_rate": 0.00029669881636160475, + "loss": 2.2275, + "step": 2488 + }, + { + "epoch": 1.606843124596514, + "grad_norm": 1.1363460660252915, + "learning_rate": 0.0002966853231593632, + "loss": 2.3398, + "step": 2489 + }, + { + "epoch": 1.607488702388638, + "grad_norm": 1.0715228311637353, + "learning_rate": 
0.0002966718027454712, + "loss": 2.3407, + "step": 2490 + }, + { + "epoch": 1.6081342801807619, + "grad_norm": 1.0865551391757031, + "learning_rate": 0.000296658255122437, + "loss": 2.2804, + "step": 2491 + }, + { + "epoch": 1.6087798579728858, + "grad_norm": 1.1689619505257411, + "learning_rate": 0.0002966446802927738, + "loss": 2.5323, + "step": 2492 + }, + { + "epoch": 1.6094254357650097, + "grad_norm": 1.0336252055286328, + "learning_rate": 0.0002966310782589999, + "loss": 2.375, + "step": 2493 + }, + { + "epoch": 1.6100710135571337, + "grad_norm": 1.1323506142062305, + "learning_rate": 0.0002966174490236386, + "loss": 2.3905, + "step": 2494 + }, + { + "epoch": 1.6107165913492576, + "grad_norm": 1.1190774268741206, + "learning_rate": 0.0002966037925892183, + "loss": 2.4799, + "step": 2495 + }, + { + "epoch": 1.6113621691413815, + "grad_norm": 1.0882165622021498, + "learning_rate": 0.0002965901089582723, + "loss": 2.4208, + "step": 2496 + }, + { + "epoch": 1.6120077469335055, + "grad_norm": 1.1114635204113867, + "learning_rate": 0.0002965763981333392, + "loss": 2.3622, + "step": 2497 + }, + { + "epoch": 1.6126533247256294, + "grad_norm": 1.1640987871659592, + "learning_rate": 0.00029656266011696234, + "loss": 2.4441, + "step": 2498 + }, + { + "epoch": 1.6132989025177533, + "grad_norm": 1.1668033253887342, + "learning_rate": 0.0002965488949116904, + "loss": 2.4387, + "step": 2499 + }, + { + "epoch": 1.6139444803098772, + "grad_norm": 1.1657166727325246, + "learning_rate": 0.0002965351025200769, + "loss": 2.2207, + "step": 2500 + }, + { + "epoch": 1.6145900581020012, + "grad_norm": 1.1705514267206816, + "learning_rate": 0.0002965212829446805, + "loss": 2.4624, + "step": 2501 + }, + { + "epoch": 1.615235635894125, + "grad_norm": 1.1523012805615591, + "learning_rate": 0.0002965074361880649, + "loss": 2.2028, + "step": 2502 + }, + { + "epoch": 1.615881213686249, + "grad_norm": 1.281866878909173, + "learning_rate": 0.0002964935622527987, + "loss": 2.3092, + "step": 2503 + }, + { + "epoch": 1.6165267914783732, + "grad_norm": 1.0850428556809486, + "learning_rate": 0.0002964796611414558, + "loss": 2.1899, + "step": 2504 + }, + { + "epoch": 1.6171723692704971, + "grad_norm": 1.1834250021993422, + "learning_rate": 0.00029646573285661495, + "loss": 2.4005, + "step": 2505 + }, + { + "epoch": 1.617817947062621, + "grad_norm": 1.129741572660534, + "learning_rate": 0.00029645177740086, + "loss": 2.3763, + "step": 2506 + }, + { + "epoch": 1.618463524854745, + "grad_norm": 1.0530949009254087, + "learning_rate": 0.00029643779477677974, + "loss": 2.3357, + "step": 2507 + }, + { + "epoch": 1.619109102646869, + "grad_norm": 1.1011911817266045, + "learning_rate": 0.00029642378498696825, + "loss": 2.3893, + "step": 2508 + }, + { + "epoch": 1.6197546804389928, + "grad_norm": 1.1114564776679903, + "learning_rate": 0.0002964097480340244, + "loss": 2.2336, + "step": 2509 + }, + { + "epoch": 1.620400258231117, + "grad_norm": 1.1197100964520403, + "learning_rate": 0.0002963956839205522, + "loss": 2.4961, + "step": 2510 + }, + { + "epoch": 1.621045836023241, + "grad_norm": 1.133231367278624, + "learning_rate": 0.00029638159264916067, + "loss": 2.4862, + "step": 2511 + }, + { + "epoch": 1.6216914138153649, + "grad_norm": 1.1235817052427552, + "learning_rate": 0.0002963674742224639, + "loss": 2.3782, + "step": 2512 + }, + { + "epoch": 1.6223369916074888, + "grad_norm": 1.2432951064342437, + "learning_rate": 0.000296353328643081, + "loss": 2.2268, + "step": 2513 + }, + { + "epoch": 1.6229825693996127, + "grad_norm": 
1.1734581858186688, + "learning_rate": 0.0002963391559136362, + "loss": 2.6344, + "step": 2514 + }, + { + "epoch": 1.6236281471917366, + "grad_norm": 1.1247242039126146, + "learning_rate": 0.00029632495603675854, + "loss": 2.319, + "step": 2515 + }, + { + "epoch": 1.6242737249838606, + "grad_norm": 1.1058164732871643, + "learning_rate": 0.00029631072901508234, + "loss": 2.3262, + "step": 2516 + }, + { + "epoch": 1.6249193027759845, + "grad_norm": 1.1570556956745606, + "learning_rate": 0.0002962964748512469, + "loss": 2.45, + "step": 2517 + }, + { + "epoch": 1.6255648805681084, + "grad_norm": 1.0905088908639602, + "learning_rate": 0.00029628219354789644, + "loss": 2.2344, + "step": 2518 + }, + { + "epoch": 1.6262104583602324, + "grad_norm": 1.2102879760093113, + "learning_rate": 0.0002962678851076803, + "loss": 2.5848, + "step": 2519 + }, + { + "epoch": 1.6268560361523563, + "grad_norm": 1.083309937510268, + "learning_rate": 0.00029625354953325284, + "loss": 2.3187, + "step": 2520 + }, + { + "epoch": 1.6275016139444802, + "grad_norm": 1.3431861340095863, + "learning_rate": 0.0002962391868272735, + "loss": 2.3894, + "step": 2521 + }, + { + "epoch": 1.6281471917366042, + "grad_norm": 1.366700238173557, + "learning_rate": 0.0002962247969924067, + "loss": 2.478, + "step": 2522 + }, + { + "epoch": 1.628792769528728, + "grad_norm": 1.1277706906347982, + "learning_rate": 0.0002962103800313219, + "loss": 2.4406, + "step": 2523 + }, + { + "epoch": 1.629438347320852, + "grad_norm": 1.0788583922708788, + "learning_rate": 0.0002961959359466935, + "loss": 2.1647, + "step": 2524 + }, + { + "epoch": 1.630083925112976, + "grad_norm": 1.3495058277089764, + "learning_rate": 0.0002961814647412012, + "loss": 2.5268, + "step": 2525 + }, + { + "epoch": 1.6307295029051, + "grad_norm": 1.1118514815247555, + "learning_rate": 0.00029616696641752944, + "loss": 2.2054, + "step": 2526 + }, + { + "epoch": 1.631375080697224, + "grad_norm": 1.1926876538900273, + "learning_rate": 0.00029615244097836786, + "loss": 2.2125, + "step": 2527 + }, + { + "epoch": 1.632020658489348, + "grad_norm": 1.1795531650450104, + "learning_rate": 0.000296137888426411, + "loss": 2.3337, + "step": 2528 + }, + { + "epoch": 1.632666236281472, + "grad_norm": 1.0969815373208323, + "learning_rate": 0.0002961233087643586, + "loss": 2.3295, + "step": 2529 + }, + { + "epoch": 1.6333118140735958, + "grad_norm": 1.1546808661353944, + "learning_rate": 0.0002961087019949154, + "loss": 2.4057, + "step": 2530 + }, + { + "epoch": 1.63395739186572, + "grad_norm": 1.1931163812629204, + "learning_rate": 0.0002960940681207909, + "loss": 2.3924, + "step": 2531 + }, + { + "epoch": 1.634602969657844, + "grad_norm": 1.1076482546278055, + "learning_rate": 0.00029607940714470004, + "loss": 2.3703, + "step": 2532 + }, + { + "epoch": 1.6352485474499678, + "grad_norm": 1.208464698619053, + "learning_rate": 0.00029606471906936245, + "loss": 2.4977, + "step": 2533 + }, + { + "epoch": 1.6358941252420918, + "grad_norm": 1.055063802944412, + "learning_rate": 0.00029605000389750294, + "loss": 2.1482, + "step": 2534 + }, + { + "epoch": 1.6365397030342157, + "grad_norm": 1.2106183032708273, + "learning_rate": 0.0002960352616318514, + "loss": 2.2976, + "step": 2535 + }, + { + "epoch": 1.6371852808263396, + "grad_norm": 1.047853881343735, + "learning_rate": 0.0002960204922751426, + "loss": 2.2595, + "step": 2536 + }, + { + "epoch": 1.6378308586184636, + "grad_norm": 1.1024270598440842, + "learning_rate": 0.0002960056958301164, + "loss": 2.4838, + "step": 2537 + }, + { + "epoch": 
1.6384764364105875, + "grad_norm": 0.993797095605994, + "learning_rate": 0.00029599087229951776, + "loss": 2.3351, + "step": 2538 + }, + { + "epoch": 1.6391220142027114, + "grad_norm": 1.1997452845797412, + "learning_rate": 0.0002959760216860966, + "loss": 2.2677, + "step": 2539 + }, + { + "epoch": 1.6397675919948353, + "grad_norm": 1.1685226194692142, + "learning_rate": 0.0002959611439926078, + "loss": 2.4226, + "step": 2540 + }, + { + "epoch": 1.6404131697869593, + "grad_norm": 1.1760053463920266, + "learning_rate": 0.00029594623922181136, + "loss": 2.4444, + "step": 2541 + }, + { + "epoch": 1.6410587475790832, + "grad_norm": 1.0465243879213075, + "learning_rate": 0.0002959313073764723, + "loss": 2.1577, + "step": 2542 + }, + { + "epoch": 1.6417043253712071, + "grad_norm": 1.2199963041743795, + "learning_rate": 0.00029591634845936056, + "loss": 2.4089, + "step": 2543 + }, + { + "epoch": 1.642349903163331, + "grad_norm": 1.053768516814903, + "learning_rate": 0.00029590136247325123, + "loss": 1.8587, + "step": 2544 + }, + { + "epoch": 1.642995480955455, + "grad_norm": 1.1452468511220524, + "learning_rate": 0.0002958863494209244, + "loss": 2.3256, + "step": 2545 + }, + { + "epoch": 1.643641058747579, + "grad_norm": 1.4197799856634816, + "learning_rate": 0.000295871309305165, + "loss": 2.437, + "step": 2546 + }, + { + "epoch": 1.644286636539703, + "grad_norm": 1.0985745115571015, + "learning_rate": 0.0002958562421287633, + "loss": 1.987, + "step": 2547 + }, + { + "epoch": 1.644932214331827, + "grad_norm": 1.2088007329118038, + "learning_rate": 0.0002958411478945143, + "loss": 2.5577, + "step": 2548 + }, + { + "epoch": 1.645577792123951, + "grad_norm": 1.1288963107972259, + "learning_rate": 0.00029582602660521826, + "loss": 2.2741, + "step": 2549 + }, + { + "epoch": 1.6462233699160749, + "grad_norm": 1.120370319877317, + "learning_rate": 0.0002958108782636801, + "loss": 2.0572, + "step": 2550 + }, + { + "epoch": 1.6468689477081988, + "grad_norm": 1.067262232666116, + "learning_rate": 0.0002957957028727103, + "loss": 2.1609, + "step": 2551 + }, + { + "epoch": 1.6475145255003227, + "grad_norm": 1.04479974397792, + "learning_rate": 0.00029578050043512383, + "loss": 2.1221, + "step": 2552 + }, + { + "epoch": 1.6481601032924469, + "grad_norm": 1.1463785873257075, + "learning_rate": 0.00029576527095374097, + "loss": 2.4194, + "step": 2553 + }, + { + "epoch": 1.6488056810845708, + "grad_norm": 2.210070889795952, + "learning_rate": 0.00029575001443138697, + "loss": 2.2729, + "step": 2554 + }, + { + "epoch": 1.6494512588766947, + "grad_norm": 1.174879786269563, + "learning_rate": 0.0002957347308708921, + "loss": 2.2926, + "step": 2555 + }, + { + "epoch": 1.6500968366688187, + "grad_norm": 1.2551960132675963, + "learning_rate": 0.00029571942027509154, + "loss": 2.5722, + "step": 2556 + }, + { + "epoch": 1.6507424144609426, + "grad_norm": 1.0954558663419942, + "learning_rate": 0.00029570408264682557, + "loss": 2.1598, + "step": 2557 + }, + { + "epoch": 1.6513879922530665, + "grad_norm": 1.0005238078794465, + "learning_rate": 0.0002956887179889395, + "loss": 1.7517, + "step": 2558 + }, + { + "epoch": 1.6520335700451905, + "grad_norm": 1.1202922805156839, + "learning_rate": 0.0002956733263042837, + "loss": 2.2755, + "step": 2559 + }, + { + "epoch": 1.6526791478373144, + "grad_norm": 1.2033600858459788, + "learning_rate": 0.00029565790759571343, + "loss": 2.3478, + "step": 2560 + }, + { + "epoch": 1.6533247256294383, + "grad_norm": 1.0709779180334826, + "learning_rate": 0.0002956424618660889, + "loss": 
2.2583, + "step": 2561 + }, + { + "epoch": 1.6539703034215623, + "grad_norm": 1.1085270322574192, + "learning_rate": 0.00029562698911827573, + "loss": 2.3238, + "step": 2562 + }, + { + "epoch": 1.6546158812136862, + "grad_norm": 1.1450836660171668, + "learning_rate": 0.000295611489355144, + "loss": 2.2692, + "step": 2563 + }, + { + "epoch": 1.6552614590058101, + "grad_norm": 1.105299445567333, + "learning_rate": 0.0002955959625795692, + "loss": 2.169, + "step": 2564 + }, + { + "epoch": 1.655907036797934, + "grad_norm": 1.1557256720444737, + "learning_rate": 0.0002955804087944318, + "loss": 2.2276, + "step": 2565 + }, + { + "epoch": 1.656552614590058, + "grad_norm": 1.1714550548482001, + "learning_rate": 0.000295564828002617, + "loss": 2.3894, + "step": 2566 + }, + { + "epoch": 1.657198192382182, + "grad_norm": 1.1256076290648955, + "learning_rate": 0.0002955492202070153, + "loss": 2.3185, + "step": 2567 + }, + { + "epoch": 1.6578437701743058, + "grad_norm": 1.2414533905967962, + "learning_rate": 0.0002955335854105221, + "loss": 2.3907, + "step": 2568 + }, + { + "epoch": 1.65848934796643, + "grad_norm": 1.2348421642058693, + "learning_rate": 0.00029551792361603784, + "loss": 2.4034, + "step": 2569 + }, + { + "epoch": 1.659134925758554, + "grad_norm": 1.0206123967510436, + "learning_rate": 0.0002955022348264679, + "loss": 2.1519, + "step": 2570 + }, + { + "epoch": 1.6597805035506779, + "grad_norm": 1.0475360078320537, + "learning_rate": 0.00029548651904472275, + "loss": 2.3587, + "step": 2571 + }, + { + "epoch": 1.6604260813428018, + "grad_norm": 1.2036479787677674, + "learning_rate": 0.00029547077627371777, + "loss": 2.4988, + "step": 2572 + }, + { + "epoch": 1.6610716591349257, + "grad_norm": 1.161796574259396, + "learning_rate": 0.0002954550065163735, + "loss": 2.3931, + "step": 2573 + }, + { + "epoch": 1.6617172369270499, + "grad_norm": 1.1449006816269838, + "learning_rate": 0.0002954392097756153, + "loss": 2.2584, + "step": 2574 + }, + { + "epoch": 1.6623628147191738, + "grad_norm": 1.060893000139148, + "learning_rate": 0.00029542338605437375, + "loss": 2.2058, + "step": 2575 + }, + { + "epoch": 1.6630083925112977, + "grad_norm": 1.0646618102808498, + "learning_rate": 0.0002954075353555842, + "loss": 2.3318, + "step": 2576 + }, + { + "epoch": 1.6636539703034217, + "grad_norm": 1.0484846234989567, + "learning_rate": 0.00029539165768218713, + "loss": 1.8663, + "step": 2577 + }, + { + "epoch": 1.6642995480955456, + "grad_norm": 1.2555756651757985, + "learning_rate": 0.00029537575303712805, + "loss": 2.3699, + "step": 2578 + }, + { + "epoch": 1.6649451258876695, + "grad_norm": 1.1252303692416197, + "learning_rate": 0.0002953598214233574, + "loss": 2.2261, + "step": 2579 + }, + { + "epoch": 1.6655907036797934, + "grad_norm": 1.059775659466037, + "learning_rate": 0.0002953438628438308, + "loss": 2.2215, + "step": 2580 + }, + { + "epoch": 1.6662362814719174, + "grad_norm": 0.958766391778922, + "learning_rate": 0.0002953278773015085, + "loss": 1.8077, + "step": 2581 + }, + { + "epoch": 1.6668818592640413, + "grad_norm": 1.2842874418199117, + "learning_rate": 0.0002953118647993562, + "loss": 2.2794, + "step": 2582 + }, + { + "epoch": 1.6675274370561652, + "grad_norm": 1.146543493596562, + "learning_rate": 0.0002952958253403442, + "loss": 2.1738, + "step": 2583 + }, + { + "epoch": 1.6681730148482892, + "grad_norm": 1.077538475467046, + "learning_rate": 0.0002952797589274481, + "loss": 2.0833, + "step": 2584 + }, + { + "epoch": 1.668818592640413, + "grad_norm": 1.1158130206548436, + 
"learning_rate": 0.0002952636655636484, + "loss": 2.625, + "step": 2585 + }, + { + "epoch": 1.669464170432537, + "grad_norm": 1.1204170488221838, + "learning_rate": 0.0002952475452519306, + "loss": 2.2466, + "step": 2586 + }, + { + "epoch": 1.670109748224661, + "grad_norm": 1.132325798580743, + "learning_rate": 0.000295231397995285, + "loss": 2.2571, + "step": 2587 + }, + { + "epoch": 1.670755326016785, + "grad_norm": 1.2042914054562752, + "learning_rate": 0.0002952152237967073, + "loss": 2.4049, + "step": 2588 + }, + { + "epoch": 1.6714009038089088, + "grad_norm": 1.329865895196638, + "learning_rate": 0.00029519902265919787, + "loss": 2.3914, + "step": 2589 + }, + { + "epoch": 1.672046481601033, + "grad_norm": 1.0888650500885613, + "learning_rate": 0.0002951827945857622, + "loss": 2.2809, + "step": 2590 + }, + { + "epoch": 1.672692059393157, + "grad_norm": 1.0929307082969002, + "learning_rate": 0.00029516653957941086, + "loss": 2.2946, + "step": 2591 + }, + { + "epoch": 1.6733376371852808, + "grad_norm": 1.0546158094477198, + "learning_rate": 0.00029515025764315913, + "loss": 1.8067, + "step": 2592 + }, + { + "epoch": 1.6739832149774048, + "grad_norm": 1.0924158392756653, + "learning_rate": 0.0002951339487800277, + "loss": 2.4273, + "step": 2593 + }, + { + "epoch": 1.6746287927695287, + "grad_norm": 1.2291931307317963, + "learning_rate": 0.00029511761299304187, + "loss": 2.3752, + "step": 2594 + }, + { + "epoch": 1.6752743705616526, + "grad_norm": 1.0447392010929295, + "learning_rate": 0.0002951012502852322, + "loss": 2.2293, + "step": 2595 + }, + { + "epoch": 1.6759199483537768, + "grad_norm": 1.0580126993325014, + "learning_rate": 0.00029508486065963404, + "loss": 1.9523, + "step": 2596 + }, + { + "epoch": 1.6765655261459007, + "grad_norm": 1.2190243574190884, + "learning_rate": 0.0002950684441192879, + "loss": 2.3787, + "step": 2597 + }, + { + "epoch": 1.6772111039380246, + "grad_norm": 1.491784047875167, + "learning_rate": 0.0002950520006672393, + "loss": 2.4185, + "step": 2598 + }, + { + "epoch": 1.6778566817301486, + "grad_norm": 1.1942064406055037, + "learning_rate": 0.0002950355303065385, + "loss": 2.324, + "step": 2599 + }, + { + "epoch": 1.6785022595222725, + "grad_norm": 1.1412163740179777, + "learning_rate": 0.00029501903304024105, + "loss": 2.1208, + "step": 2600 + }, + { + "epoch": 1.6791478373143964, + "grad_norm": 1.071605785074989, + "learning_rate": 0.0002950025088714073, + "loss": 2.2754, + "step": 2601 + }, + { + "epoch": 1.6797934151065204, + "grad_norm": 1.2805235343898622, + "learning_rate": 0.00029498595780310267, + "loss": 2.3793, + "step": 2602 + }, + { + "epoch": 1.6804389928986443, + "grad_norm": 1.117860597167582, + "learning_rate": 0.00029496937983839757, + "loss": 2.4135, + "step": 2603 + }, + { + "epoch": 1.6810845706907682, + "grad_norm": 1.1286196496568606, + "learning_rate": 0.00029495277498036733, + "loss": 2.2126, + "step": 2604 + }, + { + "epoch": 1.6817301484828922, + "grad_norm": 1.2363609823257058, + "learning_rate": 0.00029493614323209246, + "loss": 2.1903, + "step": 2605 + }, + { + "epoch": 1.682375726275016, + "grad_norm": 1.1855338667105093, + "learning_rate": 0.0002949194845966582, + "loss": 2.1941, + "step": 2606 + }, + { + "epoch": 1.68302130406714, + "grad_norm": 1.1238243362859661, + "learning_rate": 0.0002949027990771549, + "loss": 2.2321, + "step": 2607 + }, + { + "epoch": 1.683666881859264, + "grad_norm": 1.0606187072658917, + "learning_rate": 0.00029488608667667794, + "loss": 2.0847, + "step": 2608 + }, + { + "epoch": 
1.6843124596513879, + "grad_norm": 1.1222214191164093, + "learning_rate": 0.0002948693473983276, + "loss": 2.1335, + "step": 2609 + }, + { + "epoch": 1.6849580374435118, + "grad_norm": 1.1542793884881033, + "learning_rate": 0.0002948525812452093, + "loss": 2.4603, + "step": 2610 + }, + { + "epoch": 1.6856036152356357, + "grad_norm": 1.1079911495262433, + "learning_rate": 0.0002948357882204332, + "loss": 2.214, + "step": 2611 + }, + { + "epoch": 1.6862491930277599, + "grad_norm": 1.1265925249505946, + "learning_rate": 0.0002948189683271147, + "loss": 2.1251, + "step": 2612 + }, + { + "epoch": 1.6868947708198838, + "grad_norm": 1.154393455455338, + "learning_rate": 0.00029480212156837395, + "loss": 2.2863, + "step": 2613 + }, + { + "epoch": 1.6875403486120077, + "grad_norm": 1.2591391580358144, + "learning_rate": 0.00029478524794733626, + "loss": 2.4717, + "step": 2614 + }, + { + "epoch": 1.6881859264041317, + "grad_norm": 1.1091501907698036, + "learning_rate": 0.00029476834746713183, + "loss": 2.2386, + "step": 2615 + }, + { + "epoch": 1.6888315041962556, + "grad_norm": 1.0297188189827782, + "learning_rate": 0.00029475142013089594, + "loss": 2.1966, + "step": 2616 + }, + { + "epoch": 1.6894770819883798, + "grad_norm": 1.3091068413376528, + "learning_rate": 0.00029473446594176873, + "loss": 2.2302, + "step": 2617 + }, + { + "epoch": 1.6901226597805037, + "grad_norm": 1.1802937522855708, + "learning_rate": 0.00029471748490289535, + "loss": 2.254, + "step": 2618 + }, + { + "epoch": 1.6907682375726276, + "grad_norm": 0.9921796760900075, + "learning_rate": 0.00029470047701742606, + "loss": 1.9875, + "step": 2619 + }, + { + "epoch": 1.6914138153647515, + "grad_norm": 1.2424577104225472, + "learning_rate": 0.00029468344228851594, + "loss": 1.8779, + "step": 2620 + }, + { + "epoch": 1.6920593931568755, + "grad_norm": 1.191892094203821, + "learning_rate": 0.00029466638071932506, + "loss": 2.4221, + "step": 2621 + }, + { + "epoch": 1.6927049709489994, + "grad_norm": 1.0394134195884737, + "learning_rate": 0.0002946492923130186, + "loss": 1.8217, + "step": 2622 + }, + { + "epoch": 1.6933505487411233, + "grad_norm": 1.0926305305545203, + "learning_rate": 0.0002946321770727666, + "loss": 2.1944, + "step": 2623 + }, + { + "epoch": 1.6939961265332473, + "grad_norm": 1.2450275078125348, + "learning_rate": 0.00029461503500174415, + "loss": 2.3802, + "step": 2624 + }, + { + "epoch": 1.6946417043253712, + "grad_norm": 1.1832037905829826, + "learning_rate": 0.00029459786610313123, + "loss": 2.089, + "step": 2625 + }, + { + "epoch": 1.6952872821174951, + "grad_norm": 1.2471849273178996, + "learning_rate": 0.0002945806703801129, + "loss": 1.9771, + "step": 2626 + }, + { + "epoch": 1.695932859909619, + "grad_norm": 1.0783898515970798, + "learning_rate": 0.000294563447835879, + "loss": 2.0996, + "step": 2627 + }, + { + "epoch": 1.696578437701743, + "grad_norm": 1.0799880588126283, + "learning_rate": 0.0002945461984736247, + "loss": 2.3198, + "step": 2628 + }, + { + "epoch": 1.697224015493867, + "grad_norm": 1.27580559798024, + "learning_rate": 0.00029452892229654985, + "loss": 2.3039, + "step": 2629 + }, + { + "epoch": 1.6978695932859909, + "grad_norm": 1.1419803869568235, + "learning_rate": 0.0002945116193078593, + "loss": 2.3331, + "step": 2630 + }, + { + "epoch": 1.6985151710781148, + "grad_norm": 1.0482024795524576, + "learning_rate": 0.00029449428951076304, + "loss": 2.1442, + "step": 2631 + }, + { + "epoch": 1.6991607488702387, + "grad_norm": 1.1509095865890955, + "learning_rate": 0.00029447693290847584, + 
"loss": 2.3091, + "step": 2632 + }, + { + "epoch": 1.6998063266623629, + "grad_norm": 1.163066324710062, + "learning_rate": 0.0002944595495042176, + "loss": 2.379, + "step": 2633 + }, + { + "epoch": 1.7004519044544868, + "grad_norm": 1.1138947847543645, + "learning_rate": 0.00029444213930121305, + "loss": 2.3762, + "step": 2634 + }, + { + "epoch": 1.7010974822466107, + "grad_norm": 1.1728274438680708, + "learning_rate": 0.000294424702302692, + "loss": 2.2793, + "step": 2635 + }, + { + "epoch": 1.7017430600387347, + "grad_norm": 1.058269204026645, + "learning_rate": 0.0002944072385118892, + "loss": 2.2524, + "step": 2636 + }, + { + "epoch": 1.7023886378308586, + "grad_norm": 1.2238395219390472, + "learning_rate": 0.0002943897479320444, + "loss": 2.3833, + "step": 2637 + }, + { + "epoch": 1.7030342156229825, + "grad_norm": 1.1294122268460616, + "learning_rate": 0.0002943722305664022, + "loss": 2.3664, + "step": 2638 + }, + { + "epoch": 1.7036797934151067, + "grad_norm": 1.0770790801192571, + "learning_rate": 0.0002943546864182123, + "loss": 2.3123, + "step": 2639 + }, + { + "epoch": 1.7043253712072306, + "grad_norm": 1.1135677182902395, + "learning_rate": 0.0002943371154907293, + "loss": 2.0758, + "step": 2640 + }, + { + "epoch": 1.7049709489993545, + "grad_norm": 1.189428805464352, + "learning_rate": 0.0002943195177872128, + "loss": 2.4474, + "step": 2641 + }, + { + "epoch": 1.7056165267914785, + "grad_norm": 1.0881071145028676, + "learning_rate": 0.0002943018933109274, + "loss": 2.4782, + "step": 2642 + }, + { + "epoch": 1.7062621045836024, + "grad_norm": 1.1807091762837887, + "learning_rate": 0.00029428424206514263, + "loss": 2.3771, + "step": 2643 + }, + { + "epoch": 1.7069076823757263, + "grad_norm": 1.0073710950961114, + "learning_rate": 0.0002942665640531329, + "loss": 2.3375, + "step": 2644 + }, + { + "epoch": 1.7075532601678503, + "grad_norm": 1.0651322376972305, + "learning_rate": 0.0002942488592781777, + "loss": 2.2445, + "step": 2645 + }, + { + "epoch": 1.7081988379599742, + "grad_norm": 1.175638823499768, + "learning_rate": 0.00029423112774356144, + "loss": 2.1873, + "step": 2646 + }, + { + "epoch": 1.7088444157520981, + "grad_norm": 1.0260607254920746, + "learning_rate": 0.00029421336945257344, + "loss": 1.8311, + "step": 2647 + }, + { + "epoch": 1.709489993544222, + "grad_norm": 1.1882128062152335, + "learning_rate": 0.0002941955844085082, + "loss": 2.4579, + "step": 2648 + }, + { + "epoch": 1.710135571336346, + "grad_norm": 1.1051253409005957, + "learning_rate": 0.00029417777261466497, + "loss": 2.2849, + "step": 2649 + }, + { + "epoch": 1.71078114912847, + "grad_norm": 1.2131100685345928, + "learning_rate": 0.0002941599340743479, + "loss": 2.4566, + "step": 2650 + }, + { + "epoch": 1.7114267269205938, + "grad_norm": 1.3354486023657586, + "learning_rate": 0.00029414206879086645, + "loss": 2.5889, + "step": 2651 + }, + { + "epoch": 1.7120723047127178, + "grad_norm": 1.1632956266800802, + "learning_rate": 0.0002941241767675347, + "loss": 2.5168, + "step": 2652 + }, + { + "epoch": 1.7127178825048417, + "grad_norm": 1.2606164836544564, + "learning_rate": 0.0002941062580076717, + "loss": 2.456, + "step": 2653 + }, + { + "epoch": 1.7133634602969656, + "grad_norm": 1.1306919231949268, + "learning_rate": 0.00029408831251460173, + "loss": 2.3013, + "step": 2654 + }, + { + "epoch": 1.7140090380890898, + "grad_norm": 1.1837806723131052, + "learning_rate": 0.0002940703402916538, + "loss": 1.9382, + "step": 2655 + }, + { + "epoch": 1.7146546158812137, + "grad_norm": 1.1081544311713931, + 
"learning_rate": 0.0002940523413421619, + "loss": 2.2466, + "step": 2656 + }, + { + "epoch": 1.7153001936733376, + "grad_norm": 1.0403935144723728, + "learning_rate": 0.0002940343156694651, + "loss": 2.4792, + "step": 2657 + }, + { + "epoch": 1.7159457714654616, + "grad_norm": 1.1656940212070892, + "learning_rate": 0.00029401626327690735, + "loss": 2.3059, + "step": 2658 + }, + { + "epoch": 1.7165913492575855, + "grad_norm": 1.1165460130893663, + "learning_rate": 0.00029399818416783746, + "loss": 2.2329, + "step": 2659 + }, + { + "epoch": 1.7172369270497096, + "grad_norm": 1.1254861927078055, + "learning_rate": 0.0002939800783456094, + "loss": 2.4732, + "step": 2660 + }, + { + "epoch": 1.7178825048418336, + "grad_norm": 1.075332422251167, + "learning_rate": 0.00029396194581358194, + "loss": 2.2335, + "step": 2661 + }, + { + "epoch": 1.7185280826339575, + "grad_norm": 1.0639380186433633, + "learning_rate": 0.00029394378657511886, + "loss": 2.3189, + "step": 2662 + }, + { + "epoch": 1.7191736604260814, + "grad_norm": 1.115946519380027, + "learning_rate": 0.0002939256006335889, + "loss": 2.1542, + "step": 2663 + }, + { + "epoch": 1.7198192382182054, + "grad_norm": 1.2616150907333752, + "learning_rate": 0.00029390738799236566, + "loss": 2.3452, + "step": 2664 + }, + { + "epoch": 1.7204648160103293, + "grad_norm": 1.2122320823000243, + "learning_rate": 0.0002938891486548279, + "loss": 2.4266, + "step": 2665 + }, + { + "epoch": 1.7211103938024532, + "grad_norm": 1.1125756777136842, + "learning_rate": 0.00029387088262435913, + "loss": 1.9963, + "step": 2666 + }, + { + "epoch": 1.7217559715945772, + "grad_norm": 1.213340010593381, + "learning_rate": 0.00029385258990434785, + "loss": 2.2858, + "step": 2667 + }, + { + "epoch": 1.722401549386701, + "grad_norm": 1.2044251211401553, + "learning_rate": 0.0002938342704981877, + "loss": 2.3144, + "step": 2668 + }, + { + "epoch": 1.723047127178825, + "grad_norm": 1.0716960964766848, + "learning_rate": 0.00029381592440927694, + "loss": 2.4359, + "step": 2669 + }, + { + "epoch": 1.723692704970949, + "grad_norm": 1.214063881653296, + "learning_rate": 0.0002937975516410191, + "loss": 2.5264, + "step": 2670 + }, + { + "epoch": 1.7243382827630729, + "grad_norm": 1.1744697822981576, + "learning_rate": 0.00029377915219682245, + "loss": 2.2765, + "step": 2671 + }, + { + "epoch": 1.7249838605551968, + "grad_norm": 1.271200902526821, + "learning_rate": 0.00029376072608010024, + "loss": 2.1603, + "step": 2672 + }, + { + "epoch": 1.7256294383473207, + "grad_norm": 1.159064477657319, + "learning_rate": 0.00029374227329427083, + "loss": 2.19, + "step": 2673 + }, + { + "epoch": 1.7262750161394447, + "grad_norm": 1.1348581101853834, + "learning_rate": 0.0002937237938427573, + "loss": 2.4876, + "step": 2674 + }, + { + "epoch": 1.7269205939315686, + "grad_norm": 1.1564363887871665, + "learning_rate": 0.0002937052877289878, + "loss": 2.1805, + "step": 2675 + }, + { + "epoch": 1.7275661717236928, + "grad_norm": 1.2149093452073172, + "learning_rate": 0.0002936867549563954, + "loss": 2.537, + "step": 2676 + }, + { + "epoch": 1.7282117495158167, + "grad_norm": 1.1335632451993103, + "learning_rate": 0.0002936681955284182, + "loss": 2.1991, + "step": 2677 + }, + { + "epoch": 1.7288573273079406, + "grad_norm": 1.1106273616965148, + "learning_rate": 0.00029364960944849904, + "loss": 2.2923, + "step": 2678 + }, + { + "epoch": 1.7295029051000645, + "grad_norm": 1.0324967635555156, + "learning_rate": 0.00029363099672008593, + "loss": 1.8676, + "step": 2679 + }, + { + "epoch": 
1.7301484828921885, + "grad_norm": 1.1025550327631506, + "learning_rate": 0.0002936123573466317, + "loss": 2.3375, + "step": 2680 + }, + { + "epoch": 1.7307940606843124, + "grad_norm": 1.1548233668192263, + "learning_rate": 0.00029359369133159406, + "loss": 2.2233, + "step": 2681 + }, + { + "epoch": 1.7314396384764366, + "grad_norm": 1.1495295236183634, + "learning_rate": 0.0002935749986784359, + "loss": 1.9834, + "step": 2682 + }, + { + "epoch": 1.7320852162685605, + "grad_norm": 1.2081671938253482, + "learning_rate": 0.00029355627939062483, + "loss": 2.3138, + "step": 2683 + }, + { + "epoch": 1.7327307940606844, + "grad_norm": 1.1243784239160997, + "learning_rate": 0.0002935375334716335, + "loss": 2.0451, + "step": 2684 + }, + { + "epoch": 1.7333763718528084, + "grad_norm": 1.225786845318912, + "learning_rate": 0.0002935187609249393, + "loss": 2.4403, + "step": 2685 + }, + { + "epoch": 1.7340219496449323, + "grad_norm": 1.2156686223347504, + "learning_rate": 0.000293499961754025, + "loss": 2.4554, + "step": 2686 + }, + { + "epoch": 1.7346675274370562, + "grad_norm": 1.0208427464603025, + "learning_rate": 0.0002934811359623779, + "loss": 2.3279, + "step": 2687 + }, + { + "epoch": 1.7353131052291801, + "grad_norm": 1.0161672913063806, + "learning_rate": 0.0002934622835534904, + "loss": 2.1389, + "step": 2688 + }, + { + "epoch": 1.735958683021304, + "grad_norm": 1.0157023207613274, + "learning_rate": 0.00029344340453085977, + "loss": 1.9293, + "step": 2689 + }, + { + "epoch": 1.736604260813428, + "grad_norm": 1.1123237200084009, + "learning_rate": 0.0002934244988979884, + "loss": 2.2428, + "step": 2690 + }, + { + "epoch": 1.737249838605552, + "grad_norm": 1.1280673308818616, + "learning_rate": 0.0002934055666583833, + "loss": 2.357, + "step": 2691 + }, + { + "epoch": 1.7378954163976759, + "grad_norm": 1.1547675913921363, + "learning_rate": 0.0002933866078155567, + "loss": 2.4867, + "step": 2692 + }, + { + "epoch": 1.7385409941897998, + "grad_norm": 1.101360696132006, + "learning_rate": 0.0002933676223730257, + "loss": 2.082, + "step": 2693 + }, + { + "epoch": 1.7391865719819237, + "grad_norm": 1.1149566668950304, + "learning_rate": 0.0002933486103343122, + "loss": 2.5049, + "step": 2694 + }, + { + "epoch": 1.7398321497740477, + "grad_norm": 1.075138965098543, + "learning_rate": 0.0002933295717029432, + "loss": 2.1834, + "step": 2695 + }, + { + "epoch": 1.7404777275661716, + "grad_norm": 1.068273659971974, + "learning_rate": 0.0002933105064824505, + "loss": 2.436, + "step": 2696 + }, + { + "epoch": 1.7411233053582955, + "grad_norm": 1.137916456088525, + "learning_rate": 0.000293291414676371, + "loss": 2.4483, + "step": 2697 + }, + { + "epoch": 1.7417688831504197, + "grad_norm": 1.0685823298651094, + "learning_rate": 0.0002932722962882463, + "loss": 2.0574, + "step": 2698 + }, + { + "epoch": 1.7424144609425436, + "grad_norm": 1.2816706964827114, + "learning_rate": 0.0002932531513216231, + "loss": 2.3217, + "step": 2699 + }, + { + "epoch": 1.7430600387346675, + "grad_norm": 1.19633948731407, + "learning_rate": 0.0002932339797800531, + "loss": 2.6251, + "step": 2700 + }, + { + "epoch": 1.7437056165267915, + "grad_norm": 1.058236793301408, + "learning_rate": 0.0002932147816670927, + "loss": 2.4115, + "step": 2701 + }, + { + "epoch": 1.7443511943189154, + "grad_norm": 1.125946063010862, + "learning_rate": 0.0002931955569863034, + "loss": 2.3129, + "step": 2702 + }, + { + "epoch": 1.7449967721110395, + "grad_norm": 1.2255024421605427, + "learning_rate": 0.00029317630574125146, + "loss": 2.4193, + 
"step": 2703 + }, + { + "epoch": 1.7456423499031635, + "grad_norm": 1.1917488332397814, + "learning_rate": 0.0002931570279355084, + "loss": 2.4906, + "step": 2704 + }, + { + "epoch": 1.7462879276952874, + "grad_norm": 1.2307226133910139, + "learning_rate": 0.0002931377235726503, + "loss": 2.4374, + "step": 2705 + }, + { + "epoch": 1.7469335054874113, + "grad_norm": 1.1231497557323082, + "learning_rate": 0.0002931183926562584, + "loss": 2.2212, + "step": 2706 + }, + { + "epoch": 1.7475790832795353, + "grad_norm": 1.2867764393200194, + "learning_rate": 0.0002930990351899187, + "loss": 2.3776, + "step": 2707 + }, + { + "epoch": 1.7482246610716592, + "grad_norm": 1.4523547849254936, + "learning_rate": 0.0002930796511772223, + "loss": 2.4581, + "step": 2708 + }, + { + "epoch": 1.7488702388637831, + "grad_norm": 1.0846002475268237, + "learning_rate": 0.00029306024062176507, + "loss": 1.9412, + "step": 2709 + }, + { + "epoch": 1.749515816655907, + "grad_norm": 1.2525204877566345, + "learning_rate": 0.00029304080352714787, + "loss": 2.3916, + "step": 2710 + }, + { + "epoch": 1.750161394448031, + "grad_norm": 1.2508537482795181, + "learning_rate": 0.00029302133989697653, + "loss": 2.3535, + "step": 2711 + }, + { + "epoch": 1.750806972240155, + "grad_norm": 1.1123070789340435, + "learning_rate": 0.00029300184973486167, + "loss": 2.1791, + "step": 2712 + }, + { + "epoch": 1.7514525500322788, + "grad_norm": 1.0848030097730326, + "learning_rate": 0.0002929823330444191, + "loss": 2.2559, + "step": 2713 + }, + { + "epoch": 1.7520981278244028, + "grad_norm": 1.1653258314563, + "learning_rate": 0.00029296278982926917, + "loss": 2.1968, + "step": 2714 + }, + { + "epoch": 1.7527437056165267, + "grad_norm": 1.0871863089690763, + "learning_rate": 0.0002929432200930374, + "loss": 1.9989, + "step": 2715 + }, + { + "epoch": 1.7533892834086506, + "grad_norm": 1.1803167780985475, + "learning_rate": 0.0002929236238393542, + "loss": 2.3564, + "step": 2716 + }, + { + "epoch": 1.7540348612007746, + "grad_norm": 1.2168941656714656, + "learning_rate": 0.000292904001071855, + "loss": 2.4932, + "step": 2717 + }, + { + "epoch": 1.7546804389928985, + "grad_norm": 1.3420969594012482, + "learning_rate": 0.00029288435179417976, + "loss": 2.7201, + "step": 2718 + }, + { + "epoch": 1.7553260167850226, + "grad_norm": 1.1720391452561825, + "learning_rate": 0.00029286467600997385, + "loss": 2.3328, + "step": 2719 + }, + { + "epoch": 1.7559715945771466, + "grad_norm": 1.1307410814973284, + "learning_rate": 0.00029284497372288726, + "loss": 2.3417, + "step": 2720 + }, + { + "epoch": 1.7566171723692705, + "grad_norm": 1.0958119754038051, + "learning_rate": 0.00029282524493657496, + "loss": 2.1537, + "step": 2721 + }, + { + "epoch": 1.7572627501613944, + "grad_norm": 0.9825682106635363, + "learning_rate": 0.0002928054896546969, + "loss": 1.8516, + "step": 2722 + }, + { + "epoch": 1.7579083279535184, + "grad_norm": 1.0768759674234438, + "learning_rate": 0.0002927857078809178, + "loss": 2.3118, + "step": 2723 + }, + { + "epoch": 1.7585539057456423, + "grad_norm": 1.220712010098508, + "learning_rate": 0.00029276589961890737, + "loss": 2.4711, + "step": 2724 + }, + { + "epoch": 1.7591994835377665, + "grad_norm": 1.1918214622175618, + "learning_rate": 0.0002927460648723404, + "loss": 2.4952, + "step": 2725 + }, + { + "epoch": 1.7598450613298904, + "grad_norm": 1.0205292851199672, + "learning_rate": 0.00029272620364489624, + "loss": 1.8852, + "step": 2726 + }, + { + "epoch": 1.7604906391220143, + "grad_norm": 1.0690764535931616, + 
"learning_rate": 0.0002927063159402595, + "loss": 1.8006, + "step": 2727 + }, + { + "epoch": 1.7611362169141382, + "grad_norm": 1.105591816227893, + "learning_rate": 0.00029268640176211957, + "loss": 2.4507, + "step": 2728 + }, + { + "epoch": 1.7617817947062622, + "grad_norm": 1.265849754310102, + "learning_rate": 0.00029266646111417063, + "loss": 2.3684, + "step": 2729 + }, + { + "epoch": 1.762427372498386, + "grad_norm": 1.1206020684250937, + "learning_rate": 0.00029264649400011195, + "loss": 2.4493, + "step": 2730 + }, + { + "epoch": 1.76307295029051, + "grad_norm": 1.1199602645059743, + "learning_rate": 0.0002926265004236476, + "loss": 2.2343, + "step": 2731 + }, + { + "epoch": 1.763718528082634, + "grad_norm": 1.0988769610177391, + "learning_rate": 0.00029260648038848664, + "loss": 2.1251, + "step": 2732 + }, + { + "epoch": 1.764364105874758, + "grad_norm": 1.0230271843066234, + "learning_rate": 0.00029258643389834294, + "loss": 1.8732, + "step": 2733 + }, + { + "epoch": 1.7650096836668818, + "grad_norm": 1.0417726039385677, + "learning_rate": 0.00029256636095693537, + "loss": 2.0168, + "step": 2734 + }, + { + "epoch": 1.7656552614590058, + "grad_norm": 1.1579457322232087, + "learning_rate": 0.0002925462615679876, + "loss": 1.9215, + "step": 2735 + }, + { + "epoch": 1.7663008392511297, + "grad_norm": 1.2666465193150542, + "learning_rate": 0.00029252613573522843, + "loss": 2.2687, + "step": 2736 + }, + { + "epoch": 1.7669464170432536, + "grad_norm": 1.3904147523351345, + "learning_rate": 0.00029250598346239126, + "loss": 2.2482, + "step": 2737 + }, + { + "epoch": 1.7675919948353775, + "grad_norm": 1.1382204306652148, + "learning_rate": 0.0002924858047532146, + "loss": 2.406, + "step": 2738 + }, + { + "epoch": 1.7682375726275015, + "grad_norm": 0.9849033238230575, + "learning_rate": 0.0002924655996114418, + "loss": 2.0794, + "step": 2739 + }, + { + "epoch": 1.7688831504196254, + "grad_norm": 1.1989593513364731, + "learning_rate": 0.0002924453680408212, + "loss": 2.3328, + "step": 2740 + }, + { + "epoch": 1.7695287282117496, + "grad_norm": 1.299486876670537, + "learning_rate": 0.0002924251100451058, + "loss": 2.3724, + "step": 2741 + }, + { + "epoch": 1.7701743060038735, + "grad_norm": 1.2109976507496216, + "learning_rate": 0.00029240482562805384, + "loss": 2.3008, + "step": 2742 + }, + { + "epoch": 1.7708198837959974, + "grad_norm": 1.1227168180156915, + "learning_rate": 0.0002923845147934282, + "loss": 2.491, + "step": 2743 + }, + { + "epoch": 1.7714654615881213, + "grad_norm": 1.1915426965580092, + "learning_rate": 0.00029236417754499675, + "loss": 1.9256, + "step": 2744 + }, + { + "epoch": 1.7721110393802453, + "grad_norm": 1.1289473130491736, + "learning_rate": 0.0002923438138865323, + "loss": 2.3315, + "step": 2745 + }, + { + "epoch": 1.7727566171723694, + "grad_norm": 1.160912000121486, + "learning_rate": 0.0002923234238218125, + "loss": 2.1802, + "step": 2746 + }, + { + "epoch": 1.7734021949644934, + "grad_norm": 1.1339188647716236, + "learning_rate": 0.0002923030073546199, + "loss": 2.4878, + "step": 2747 + }, + { + "epoch": 1.7740477727566173, + "grad_norm": 1.1739649145734206, + "learning_rate": 0.000292282564488742, + "loss": 2.1571, + "step": 2748 + }, + { + "epoch": 1.7746933505487412, + "grad_norm": 1.2670121728466548, + "learning_rate": 0.0002922620952279711, + "loss": 2.2711, + "step": 2749 + }, + { + "epoch": 1.7753389283408652, + "grad_norm": 1.3822086479107427, + "learning_rate": 0.0002922415995761046, + "loss": 2.3495, + "step": 2750 + }, + { + "epoch": 
1.775984506132989, + "grad_norm": 1.1730268774377641, + "learning_rate": 0.0002922210775369445, + "loss": 2.3279, + "step": 2751 + }, + { + "epoch": 1.776630083925113, + "grad_norm": 1.0824541149850133, + "learning_rate": 0.00029220052911429795, + "loss": 2.3011, + "step": 2752 + }, + { + "epoch": 1.777275661717237, + "grad_norm": 1.0766124156143917, + "learning_rate": 0.0002921799543119769, + "loss": 1.8494, + "step": 2753 + }, + { + "epoch": 1.7779212395093609, + "grad_norm": 1.2723151905358288, + "learning_rate": 0.0002921593531337981, + "loss": 2.2966, + "step": 2754 + }, + { + "epoch": 1.7785668173014848, + "grad_norm": 1.087434128954802, + "learning_rate": 0.0002921387255835833, + "loss": 2.2617, + "step": 2755 + }, + { + "epoch": 1.7792123950936087, + "grad_norm": 1.1898382940202517, + "learning_rate": 0.00029211807166515917, + "loss": 2.1182, + "step": 2756 + }, + { + "epoch": 1.7798579728857327, + "grad_norm": 1.2119170900006504, + "learning_rate": 0.0002920973913823573, + "loss": 2.5697, + "step": 2757 + }, + { + "epoch": 1.7805035506778566, + "grad_norm": 1.0644314600496874, + "learning_rate": 0.0002920766847390139, + "loss": 2.19, + "step": 2758 + }, + { + "epoch": 1.7811491284699805, + "grad_norm": 1.2958626653469747, + "learning_rate": 0.00029205595173897045, + "loss": 2.5258, + "step": 2759 + }, + { + "epoch": 1.7817947062621045, + "grad_norm": 1.0882573945637373, + "learning_rate": 0.000292035192386073, + "loss": 2.2302, + "step": 2760 + }, + { + "epoch": 1.7824402840542284, + "grad_norm": 1.1113058593263268, + "learning_rate": 0.0002920144066841728, + "loss": 2.2904, + "step": 2761 + }, + { + "epoch": 1.7830858618463525, + "grad_norm": 1.1499815006510143, + "learning_rate": 0.0002919935946371256, + "loss": 2.3859, + "step": 2762 + }, + { + "epoch": 1.7837314396384765, + "grad_norm": 1.1366514224376023, + "learning_rate": 0.0002919727562487924, + "loss": 1.772, + "step": 2763 + }, + { + "epoch": 1.7843770174306004, + "grad_norm": 1.0964622104354853, + "learning_rate": 0.0002919518915230389, + "loss": 2.0849, + "step": 2764 + }, + { + "epoch": 1.7850225952227243, + "grad_norm": 1.3081036757970181, + "learning_rate": 0.0002919310004637357, + "loss": 2.488, + "step": 2765 + }, + { + "epoch": 1.7856681730148483, + "grad_norm": 1.1201777258783374, + "learning_rate": 0.0002919100830747584, + "loss": 2.0394, + "step": 2766 + }, + { + "epoch": 1.7863137508069722, + "grad_norm": 1.0852443640392921, + "learning_rate": 0.00029188913935998724, + "loss": 1.7933, + "step": 2767 + }, + { + "epoch": 1.7869593285990963, + "grad_norm": 1.2427566177545721, + "learning_rate": 0.0002918681693233076, + "loss": 2.1816, + "step": 2768 + }, + { + "epoch": 1.7876049063912203, + "grad_norm": 1.2161970626647804, + "learning_rate": 0.00029184717296860957, + "loss": 2.4295, + "step": 2769 + }, + { + "epoch": 1.7882504841833442, + "grad_norm": 1.046692597507873, + "learning_rate": 0.0002918261502997883, + "loss": 2.2941, + "step": 2770 + }, + { + "epoch": 1.7888960619754681, + "grad_norm": 1.0499854865056832, + "learning_rate": 0.0002918051013207436, + "loss": 2.2044, + "step": 2771 + }, + { + "epoch": 1.789541639767592, + "grad_norm": 1.0854209050483832, + "learning_rate": 0.00029178402603538035, + "loss": 1.9741, + "step": 2772 + }, + { + "epoch": 1.790187217559716, + "grad_norm": 1.1725140697498644, + "learning_rate": 0.00029176292444760824, + "loss": 1.8053, + "step": 2773 + }, + { + "epoch": 1.79083279535184, + "grad_norm": 0.9836952001701811, + "learning_rate": 0.0002917417965613417, + "loss": 
1.8244, + "step": 2774 + }, + { + "epoch": 1.7914783731439639, + "grad_norm": 1.082552498142311, + "learning_rate": 0.0002917206423805004, + "loss": 2.0201, + "step": 2775 + }, + { + "epoch": 1.7921239509360878, + "grad_norm": 1.0967274488653822, + "learning_rate": 0.00029169946190900847, + "loss": 2.3187, + "step": 2776 + }, + { + "epoch": 1.7927695287282117, + "grad_norm": 1.2758928401886906, + "learning_rate": 0.00029167825515079515, + "loss": 2.4773, + "step": 2777 + }, + { + "epoch": 1.7934151065203356, + "grad_norm": 1.0732607570140866, + "learning_rate": 0.00029165702210979453, + "loss": 2.158, + "step": 2778 + }, + { + "epoch": 1.7940606843124596, + "grad_norm": 1.106644989451362, + "learning_rate": 0.0002916357627899456, + "loss": 2.2752, + "step": 2779 + }, + { + "epoch": 1.7947062621045835, + "grad_norm": 1.2962722961669906, + "learning_rate": 0.00029161447719519215, + "loss": 1.8354, + "step": 2780 + }, + { + "epoch": 1.7953518398967074, + "grad_norm": 1.091124062763473, + "learning_rate": 0.0002915931653294829, + "loss": 2.3636, + "step": 2781 + }, + { + "epoch": 1.7959974176888314, + "grad_norm": 1.1819237813965822, + "learning_rate": 0.00029157182719677133, + "loss": 2.2894, + "step": 2782 + }, + { + "epoch": 1.7966429954809553, + "grad_norm": 1.25436113635436, + "learning_rate": 0.000291550462801016, + "loss": 2.3282, + "step": 2783 + }, + { + "epoch": 1.7972885732730794, + "grad_norm": 1.1569974664427634, + "learning_rate": 0.0002915290721461802, + "loss": 2.4167, + "step": 2784 + }, + { + "epoch": 1.7979341510652034, + "grad_norm": 1.1297507542447334, + "learning_rate": 0.0002915076552362321, + "loss": 2.3306, + "step": 2785 + }, + { + "epoch": 1.7985797288573273, + "grad_norm": 1.1952054101321563, + "learning_rate": 0.00029148621207514475, + "loss": 1.7329, + "step": 2786 + }, + { + "epoch": 1.7992253066494512, + "grad_norm": 1.1562561819601702, + "learning_rate": 0.00029146474266689604, + "loss": 2.149, + "step": 2787 + }, + { + "epoch": 1.7998708844415752, + "grad_norm": 1.1232537368545297, + "learning_rate": 0.0002914432470154689, + "loss": 2.1985, + "step": 2788 + }, + { + "epoch": 1.8005164622336993, + "grad_norm": 1.126546775693941, + "learning_rate": 0.00029142172512485083, + "loss": 2.4913, + "step": 2789 + }, + { + "epoch": 1.8011620400258233, + "grad_norm": 1.1104387463109482, + "learning_rate": 0.0002914001769990345, + "loss": 2.3378, + "step": 2790 + }, + { + "epoch": 1.8018076178179472, + "grad_norm": 0.9962351292471637, + "learning_rate": 0.00029137860264201725, + "loss": 1.7947, + "step": 2791 + }, + { + "epoch": 1.8024531956100711, + "grad_norm": 1.0285292449537022, + "learning_rate": 0.00029135700205780137, + "loss": 2.1939, + "step": 2792 + }, + { + "epoch": 1.803098773402195, + "grad_norm": 1.091685488765263, + "learning_rate": 0.00029133537525039395, + "loss": 2.2511, + "step": 2793 + }, + { + "epoch": 1.803744351194319, + "grad_norm": 1.070891372126688, + "learning_rate": 0.00029131372222380704, + "loss": 2.3153, + "step": 2794 + }, + { + "epoch": 1.804389928986443, + "grad_norm": 1.0867936636893987, + "learning_rate": 0.00029129204298205744, + "loss": 2.168, + "step": 2795 + }, + { + "epoch": 1.8050355067785668, + "grad_norm": 1.234895632826617, + "learning_rate": 0.00029127033752916694, + "loss": 2.191, + "step": 2796 + }, + { + "epoch": 1.8056810845706908, + "grad_norm": 1.0995633725644605, + "learning_rate": 0.00029124860586916216, + "loss": 1.82, + "step": 2797 + }, + { + "epoch": 1.8063266623628147, + "grad_norm": 1.1026139970857862, + 
"learning_rate": 0.0002912268480060744, + "loss": 2.2212, + "step": 2798 + }, + { + "epoch": 1.8069722401549386, + "grad_norm": 1.124380969785801, + "learning_rate": 0.0002912050639439401, + "loss": 2.4113, + "step": 2799 + }, + { + "epoch": 1.8076178179470626, + "grad_norm": 1.1308784570249106, + "learning_rate": 0.00029118325368680033, + "loss": 2.3072, + "step": 2800 + }, + { + "epoch": 1.8082633957391865, + "grad_norm": 1.0618075483230365, + "learning_rate": 0.00029116141723870123, + "loss": 2.293, + "step": 2801 + }, + { + "epoch": 1.8089089735313104, + "grad_norm": 1.0964061398558875, + "learning_rate": 0.00029113955460369364, + "loss": 2.2275, + "step": 2802 + }, + { + "epoch": 1.8095545513234343, + "grad_norm": 1.1777820578851759, + "learning_rate": 0.0002911176657858333, + "loss": 2.0652, + "step": 2803 + }, + { + "epoch": 1.8102001291155583, + "grad_norm": 1.1083555125356706, + "learning_rate": 0.0002910957507891808, + "loss": 2.4837, + "step": 2804 + }, + { + "epoch": 1.8108457069076824, + "grad_norm": 1.0812339261453265, + "learning_rate": 0.0002910738096178016, + "loss": 2.5035, + "step": 2805 + }, + { + "epoch": 1.8114912846998064, + "grad_norm": 1.2173675372835004, + "learning_rate": 0.000291051842275766, + "loss": 2.4471, + "step": 2806 + }, + { + "epoch": 1.8121368624919303, + "grad_norm": 1.1859849367242838, + "learning_rate": 0.0002910298487671493, + "loss": 2.3368, + "step": 2807 + }, + { + "epoch": 1.8127824402840542, + "grad_norm": 1.0644847521303047, + "learning_rate": 0.00029100782909603133, + "loss": 2.2823, + "step": 2808 + }, + { + "epoch": 1.8134280180761781, + "grad_norm": 1.185973537049577, + "learning_rate": 0.0002909857832664971, + "loss": 2.343, + "step": 2809 + }, + { + "epoch": 1.814073595868302, + "grad_norm": 1.1890444898604828, + "learning_rate": 0.00029096371128263624, + "loss": 2.4482, + "step": 2810 + }, + { + "epoch": 1.8147191736604262, + "grad_norm": 1.2013561137728375, + "learning_rate": 0.0002909416131485434, + "loss": 2.254, + "step": 2811 + }, + { + "epoch": 1.8153647514525502, + "grad_norm": 1.3211926369695635, + "learning_rate": 0.00029091948886831806, + "loss": 2.37, + "step": 2812 + }, + { + "epoch": 1.816010329244674, + "grad_norm": 1.206999313493083, + "learning_rate": 0.00029089733844606444, + "loss": 2.5233, + "step": 2813 + }, + { + "epoch": 1.816655907036798, + "grad_norm": 1.108741111353479, + "learning_rate": 0.00029087516188589164, + "loss": 2.4274, + "step": 2814 + }, + { + "epoch": 1.817301484828922, + "grad_norm": 1.0528935148795955, + "learning_rate": 0.00029085295919191375, + "loss": 2.2617, + "step": 2815 + }, + { + "epoch": 1.8179470626210459, + "grad_norm": 1.106661123260247, + "learning_rate": 0.00029083073036824945, + "loss": 2.2995, + "step": 2816 + }, + { + "epoch": 1.8185926404131698, + "grad_norm": 1.0456177158578839, + "learning_rate": 0.00029080847541902255, + "loss": 2.3769, + "step": 2817 + }, + { + "epoch": 1.8192382182052937, + "grad_norm": 1.0431968588935647, + "learning_rate": 0.00029078619434836153, + "loss": 2.2003, + "step": 2818 + }, + { + "epoch": 1.8198837959974177, + "grad_norm": 1.0421556948299053, + "learning_rate": 0.00029076388716039976, + "loss": 2.2734, + "step": 2819 + }, + { + "epoch": 1.8205293737895416, + "grad_norm": 1.1353604175995167, + "learning_rate": 0.00029074155385927546, + "loss": 2.0698, + "step": 2820 + }, + { + "epoch": 1.8211749515816655, + "grad_norm": 1.048759996706234, + "learning_rate": 0.00029071919444913165, + "loss": 2.0406, + "step": 2821 + }, + { + "epoch": 
1.8218205293737895, + "grad_norm": 1.030465763478065, + "learning_rate": 0.00029069680893411626, + "loss": 1.9844, + "step": 2822 + }, + { + "epoch": 1.8224661071659134, + "grad_norm": 1.074618547271343, + "learning_rate": 0.000290674397318382, + "loss": 2.0072, + "step": 2823 + }, + { + "epoch": 1.8231116849580373, + "grad_norm": 1.0848248673228038, + "learning_rate": 0.0002906519596060866, + "loss": 1.8938, + "step": 2824 + }, + { + "epoch": 1.8237572627501613, + "grad_norm": 1.1587081915184632, + "learning_rate": 0.00029062949580139233, + "loss": 2.3488, + "step": 2825 + }, + { + "epoch": 1.8244028405422852, + "grad_norm": 1.195011495896296, + "learning_rate": 0.0002906070059084665, + "loss": 2.4823, + "step": 2826 + }, + { + "epoch": 1.8250484183344093, + "grad_norm": 1.0933853837775163, + "learning_rate": 0.0002905844899314812, + "loss": 2.2616, + "step": 2827 + }, + { + "epoch": 1.8256939961265333, + "grad_norm": 1.187301298075481, + "learning_rate": 0.0002905619478746135, + "loss": 2.3053, + "step": 2828 + }, + { + "epoch": 1.8263395739186572, + "grad_norm": 1.167713618392874, + "learning_rate": 0.000290539379742045, + "loss": 2.2801, + "step": 2829 + }, + { + "epoch": 1.8269851517107811, + "grad_norm": 1.1462589968139834, + "learning_rate": 0.0002905167855379625, + "loss": 2.4012, + "step": 2830 + }, + { + "epoch": 1.827630729502905, + "grad_norm": 1.1175048012492317, + "learning_rate": 0.00029049416526655736, + "loss": 2.3245, + "step": 2831 + }, + { + "epoch": 1.8282763072950292, + "grad_norm": 1.0520355371508503, + "learning_rate": 0.00029047151893202587, + "loss": 2.2255, + "step": 2832 + }, + { + "epoch": 1.8289218850871531, + "grad_norm": 1.1225598334392937, + "learning_rate": 0.0002904488465385692, + "loss": 2.2052, + "step": 2833 + }, + { + "epoch": 1.829567462879277, + "grad_norm": 1.127303487958318, + "learning_rate": 0.0002904261480903933, + "loss": 2.2266, + "step": 2834 + }, + { + "epoch": 1.830213040671401, + "grad_norm": 1.0687317932648364, + "learning_rate": 0.0002904034235917089, + "loss": 2.3029, + "step": 2835 + }, + { + "epoch": 1.830858618463525, + "grad_norm": 1.0121174554304682, + "learning_rate": 0.00029038067304673176, + "loss": 2.1345, + "step": 2836 + }, + { + "epoch": 1.8315041962556489, + "grad_norm": 1.1051082717766254, + "learning_rate": 0.00029035789645968224, + "loss": 2.3227, + "step": 2837 + }, + { + "epoch": 1.8321497740477728, + "grad_norm": 1.1756596350892732, + "learning_rate": 0.0002903350938347857, + "loss": 2.2492, + "step": 2838 + }, + { + "epoch": 1.8327953518398967, + "grad_norm": 1.2297683719468684, + "learning_rate": 0.00029031226517627225, + "loss": 2.3663, + "step": 2839 + }, + { + "epoch": 1.8334409296320207, + "grad_norm": 1.1909294493813039, + "learning_rate": 0.0002902894104883768, + "loss": 2.4445, + "step": 2840 + }, + { + "epoch": 1.8340865074241446, + "grad_norm": 1.1687116231379489, + "learning_rate": 0.0002902665297753392, + "loss": 2.5438, + "step": 2841 + }, + { + "epoch": 1.8347320852162685, + "grad_norm": 1.0152740773987046, + "learning_rate": 0.00029024362304140393, + "loss": 2.2525, + "step": 2842 + }, + { + "epoch": 1.8353776630083924, + "grad_norm": 1.0310113493369755, + "learning_rate": 0.00029022069029082054, + "loss": 1.7783, + "step": 2843 + }, + { + "epoch": 1.8360232408005164, + "grad_norm": 1.1305173829334878, + "learning_rate": 0.00029019773152784336, + "loss": 2.4227, + "step": 2844 + }, + { + "epoch": 1.8366688185926403, + "grad_norm": 1.0600039550002784, + "learning_rate": 0.0002901747467567313, + "loss": 
2.3946, + "step": 2845 + }, + { + "epoch": 1.8373143963847642, + "grad_norm": 1.0986202140461372, + "learning_rate": 0.0002901517359817484, + "loss": 2.2263, + "step": 2846 + }, + { + "epoch": 1.8379599741768882, + "grad_norm": 1.0883865903359835, + "learning_rate": 0.00029012869920716337, + "loss": 2.3677, + "step": 2847 + }, + { + "epoch": 1.8386055519690123, + "grad_norm": 1.1245253033522224, + "learning_rate": 0.0002901056364372497, + "loss": 2.5008, + "step": 2848 + }, + { + "epoch": 1.8392511297611362, + "grad_norm": 1.0474641972564525, + "learning_rate": 0.0002900825476762859, + "loss": 2.2676, + "step": 2849 + }, + { + "epoch": 1.8398967075532602, + "grad_norm": 1.0846300347386848, + "learning_rate": 0.00029005943292855505, + "loss": 2.2705, + "step": 2850 + }, + { + "epoch": 1.840542285345384, + "grad_norm": 1.2376992921067305, + "learning_rate": 0.0002900362921983453, + "loss": 2.2612, + "step": 2851 + }, + { + "epoch": 1.841187863137508, + "grad_norm": 1.1543074010267722, + "learning_rate": 0.00029001312548994937, + "loss": 2.2855, + "step": 2852 + }, + { + "epoch": 1.841833440929632, + "grad_norm": 1.1370854002577782, + "learning_rate": 0.00028998993280766503, + "loss": 2.3221, + "step": 2853 + }, + { + "epoch": 1.8424790187217561, + "grad_norm": 1.1607176521585996, + "learning_rate": 0.00028996671415579473, + "loss": 2.4239, + "step": 2854 + }, + { + "epoch": 1.84312459651388, + "grad_norm": 1.2204281674336714, + "learning_rate": 0.00028994346953864573, + "loss": 2.1915, + "step": 2855 + }, + { + "epoch": 1.843770174306004, + "grad_norm": 1.173242686179476, + "learning_rate": 0.00028992019896053017, + "loss": 2.3631, + "step": 2856 + }, + { + "epoch": 1.844415752098128, + "grad_norm": 1.2051600314614332, + "learning_rate": 0.000289896902425765, + "loss": 2.5351, + "step": 2857 + }, + { + "epoch": 1.8450613298902518, + "grad_norm": 1.0235228470336342, + "learning_rate": 0.00028987357993867196, + "loss": 2.43, + "step": 2858 + }, + { + "epoch": 1.8457069076823758, + "grad_norm": 1.0660823251436153, + "learning_rate": 0.0002898502315035776, + "loss": 2.1768, + "step": 2859 + }, + { + "epoch": 1.8463524854744997, + "grad_norm": 0.9975095914564919, + "learning_rate": 0.0002898268571248133, + "loss": 1.7624, + "step": 2860 + }, + { + "epoch": 1.8469980632666236, + "grad_norm": 1.0292445759711712, + "learning_rate": 0.00028980345680671533, + "loss": 2.2986, + "step": 2861 + }, + { + "epoch": 1.8476436410587476, + "grad_norm": 1.0394663788601104, + "learning_rate": 0.0002897800305536246, + "loss": 2.1514, + "step": 2862 + }, + { + "epoch": 1.8482892188508715, + "grad_norm": 0.9759543935096239, + "learning_rate": 0.00028975657836988693, + "loss": 2.0707, + "step": 2863 + }, + { + "epoch": 1.8489347966429954, + "grad_norm": 1.1313323546925842, + "learning_rate": 0.00028973310025985296, + "loss": 2.3072, + "step": 2864 + }, + { + "epoch": 1.8495803744351194, + "grad_norm": 1.2703724599830892, + "learning_rate": 0.0002897095962278781, + "loss": 2.5566, + "step": 2865 + }, + { + "epoch": 1.8502259522272433, + "grad_norm": 1.109453530479903, + "learning_rate": 0.00028968606627832266, + "loss": 2.5802, + "step": 2866 + }, + { + "epoch": 1.8508715300193672, + "grad_norm": 1.2130297486098303, + "learning_rate": 0.00028966251041555156, + "loss": 2.2868, + "step": 2867 + }, + { + "epoch": 1.8515171078114911, + "grad_norm": 1.1706985618982428, + "learning_rate": 0.0002896389286439348, + "loss": 2.3572, + "step": 2868 + }, + { + "epoch": 1.852162685603615, + "grad_norm": 1.2356739151667187, + 
"learning_rate": 0.00028961532096784704, + "loss": 1.7149, + "step": 2869 + }, + { + "epoch": 1.8528082633957392, + "grad_norm": 1.1087446169961521, + "learning_rate": 0.00028959168739166763, + "loss": 2.1748, + "step": 2870 + }, + { + "epoch": 1.8534538411878632, + "grad_norm": 1.2127729756016699, + "learning_rate": 0.0002895680279197809, + "loss": 2.5507, + "step": 2871 + }, + { + "epoch": 1.854099418979987, + "grad_norm": 1.0988873551915694, + "learning_rate": 0.00028954434255657596, + "loss": 2.4064, + "step": 2872 + }, + { + "epoch": 1.854744996772111, + "grad_norm": 1.2141124160832124, + "learning_rate": 0.00028952063130644666, + "loss": 2.3909, + "step": 2873 + }, + { + "epoch": 1.855390574564235, + "grad_norm": 1.224654345418532, + "learning_rate": 0.0002894968941737917, + "loss": 2.1905, + "step": 2874 + }, + { + "epoch": 1.856036152356359, + "grad_norm": 1.0731618940153445, + "learning_rate": 0.00028947313116301453, + "loss": 2.2673, + "step": 2875 + }, + { + "epoch": 1.856681730148483, + "grad_norm": 1.1566945734695275, + "learning_rate": 0.00028944934227852345, + "loss": 2.1998, + "step": 2876 + }, + { + "epoch": 1.857327307940607, + "grad_norm": 1.328732298319286, + "learning_rate": 0.0002894255275247316, + "loss": 2.5625, + "step": 2877 + }, + { + "epoch": 1.857972885732731, + "grad_norm": 1.0531107388887022, + "learning_rate": 0.0002894016869060568, + "loss": 2.3125, + "step": 2878 + }, + { + "epoch": 1.8586184635248548, + "grad_norm": 1.046191207113415, + "learning_rate": 0.0002893778204269217, + "loss": 2.3124, + "step": 2879 + }, + { + "epoch": 1.8592640413169788, + "grad_norm": 1.1261645268905152, + "learning_rate": 0.0002893539280917539, + "loss": 2.4809, + "step": 2880 + }, + { + "epoch": 1.8599096191091027, + "grad_norm": 1.1066301390108317, + "learning_rate": 0.0002893300099049856, + "loss": 1.8604, + "step": 2881 + }, + { + "epoch": 1.8605551969012266, + "grad_norm": 1.033377727959755, + "learning_rate": 0.00028930606587105386, + "loss": 2.2448, + "step": 2882 + }, + { + "epoch": 1.8612007746933505, + "grad_norm": 1.2210060679159755, + "learning_rate": 0.0002892820959944006, + "loss": 2.42, + "step": 2883 + }, + { + "epoch": 1.8618463524854745, + "grad_norm": 1.1101340137834028, + "learning_rate": 0.0002892581002794724, + "loss": 2.2011, + "step": 2884 + }, + { + "epoch": 1.8624919302775984, + "grad_norm": 1.10201564342248, + "learning_rate": 0.00028923407873072076, + "loss": 2.2658, + "step": 2885 + }, + { + "epoch": 1.8631375080697223, + "grad_norm": 1.1158759340219062, + "learning_rate": 0.00028921003135260197, + "loss": 1.8121, + "step": 2886 + }, + { + "epoch": 1.8637830858618463, + "grad_norm": 1.19899481786975, + "learning_rate": 0.000289185958149577, + "loss": 2.4404, + "step": 2887 + }, + { + "epoch": 1.8644286636539702, + "grad_norm": 1.1515064610512074, + "learning_rate": 0.0002891618591261117, + "loss": 2.3714, + "step": 2888 + }, + { + "epoch": 1.8650742414460941, + "grad_norm": 1.307979120525815, + "learning_rate": 0.0002891377342866767, + "loss": 2.4022, + "step": 2889 + }, + { + "epoch": 1.865719819238218, + "grad_norm": 1.0875066420017308, + "learning_rate": 0.0002891135836357474, + "loss": 1.9276, + "step": 2890 + }, + { + "epoch": 1.8663653970303422, + "grad_norm": 1.2374796247240436, + "learning_rate": 0.000289089407177804, + "loss": 2.4449, + "step": 2891 + }, + { + "epoch": 1.8670109748224661, + "grad_norm": 1.1386384258567452, + "learning_rate": 0.00028906520491733145, + "loss": 1.7681, + "step": 2892 + }, + { + "epoch": 1.86765655261459, + 
"grad_norm": 1.1792715918403909, + "learning_rate": 0.0002890409768588196, + "loss": 2.3578, + "step": 2893 + }, + { + "epoch": 1.868302130406714, + "grad_norm": 1.2137220801014954, + "learning_rate": 0.0002890167230067629, + "loss": 2.3306, + "step": 2894 + }, + { + "epoch": 1.868947708198838, + "grad_norm": 0.9822141974141783, + "learning_rate": 0.0002889924433656609, + "loss": 2.1317, + "step": 2895 + }, + { + "epoch": 1.8695932859909619, + "grad_norm": 1.1438827314794995, + "learning_rate": 0.00028896813794001744, + "loss": 1.9435, + "step": 2896 + }, + { + "epoch": 1.870238863783086, + "grad_norm": 1.117106216204231, + "learning_rate": 0.0002889438067343416, + "loss": 2.1479, + "step": 2897 + }, + { + "epoch": 1.87088444157521, + "grad_norm": 1.1659056486141903, + "learning_rate": 0.000288919449753147, + "loss": 2.1272, + "step": 2898 + }, + { + "epoch": 1.8715300193673339, + "grad_norm": 1.01960786580675, + "learning_rate": 0.0002888950670009522, + "loss": 2.2284, + "step": 2899 + }, + { + "epoch": 1.8721755971594578, + "grad_norm": 1.068354139554252, + "learning_rate": 0.00028887065848228036, + "loss": 2.0178, + "step": 2900 + }, + { + "epoch": 1.8728211749515817, + "grad_norm": 1.0802269948921184, + "learning_rate": 0.0002888462242016596, + "loss": 2.2644, + "step": 2901 + }, + { + "epoch": 1.8734667527437057, + "grad_norm": 1.0822498282481419, + "learning_rate": 0.00028882176416362266, + "loss": 2.3154, + "step": 2902 + }, + { + "epoch": 1.8741123305358296, + "grad_norm": 1.1331937845418614, + "learning_rate": 0.0002887972783727072, + "loss": 2.1011, + "step": 2903 + }, + { + "epoch": 1.8747579083279535, + "grad_norm": 1.14647116179675, + "learning_rate": 0.0002887727668334555, + "loss": 2.5205, + "step": 2904 + }, + { + "epoch": 1.8754034861200775, + "grad_norm": 1.151575492116404, + "learning_rate": 0.0002887482295504148, + "loss": 2.268, + "step": 2905 + }, + { + "epoch": 1.8760490639122014, + "grad_norm": 1.1236426159145616, + "learning_rate": 0.00028872366652813694, + "loss": 2.14, + "step": 2906 + }, + { + "epoch": 1.8766946417043253, + "grad_norm": 1.1215421111486625, + "learning_rate": 0.00028869907777117867, + "loss": 1.8798, + "step": 2907 + }, + { + "epoch": 1.8773402194964492, + "grad_norm": 1.1390066610119116, + "learning_rate": 0.0002886744632841014, + "loss": 2.3793, + "step": 2908 + }, + { + "epoch": 1.8779857972885732, + "grad_norm": 1.038513678360869, + "learning_rate": 0.0002886498230714715, + "loss": 2.134, + "step": 2909 + }, + { + "epoch": 1.878631375080697, + "grad_norm": 1.1062219346629274, + "learning_rate": 0.0002886251571378598, + "loss": 2.3719, + "step": 2910 + }, + { + "epoch": 1.879276952872821, + "grad_norm": 1.05673809500772, + "learning_rate": 0.00028860046548784225, + "loss": 2.2492, + "step": 2911 + }, + { + "epoch": 1.879922530664945, + "grad_norm": 1.0954878683877756, + "learning_rate": 0.0002885757481259993, + "loss": 2.5626, + "step": 2912 + }, + { + "epoch": 1.8805681084570691, + "grad_norm": 1.097083961159303, + "learning_rate": 0.0002885510050569163, + "loss": 2.0506, + "step": 2913 + }, + { + "epoch": 1.881213686249193, + "grad_norm": 1.0719824034081753, + "learning_rate": 0.0002885262362851834, + "loss": 1.9625, + "step": 2914 + }, + { + "epoch": 1.881859264041317, + "grad_norm": 1.1444347358646478, + "learning_rate": 0.0002885014418153955, + "loss": 2.4274, + "step": 2915 + }, + { + "epoch": 1.882504841833441, + "grad_norm": 1.0837641819486028, + "learning_rate": 0.0002884766216521521, + "loss": 2.299, + "step": 2916 + }, + { + 
"epoch": 1.8831504196255648, + "grad_norm": 1.0688684666607682, + "learning_rate": 0.00028845177580005767, + "loss": 2.0439, + "step": 2917 + }, + { + "epoch": 1.883795997417689, + "grad_norm": 1.2172046046523801, + "learning_rate": 0.00028842690426372135, + "loss": 2.3088, + "step": 2918 + }, + { + "epoch": 1.884441575209813, + "grad_norm": 1.11760934416338, + "learning_rate": 0.00028840200704775707, + "loss": 2.2314, + "step": 2919 + }, + { + "epoch": 1.8850871530019369, + "grad_norm": 1.036120748602373, + "learning_rate": 0.0002883770841567835, + "loss": 2.2827, + "step": 2920 + }, + { + "epoch": 1.8857327307940608, + "grad_norm": 1.1915594244097132, + "learning_rate": 0.00028835213559542423, + "loss": 2.4788, + "step": 2921 + }, + { + "epoch": 1.8863783085861847, + "grad_norm": 1.088782903126151, + "learning_rate": 0.0002883271613683073, + "loss": 2.444, + "step": 2922 + }, + { + "epoch": 1.8870238863783086, + "grad_norm": 1.0517398489092933, + "learning_rate": 0.0002883021614800658, + "loss": 1.9491, + "step": 2923 + }, + { + "epoch": 1.8876694641704326, + "grad_norm": 1.1356980315875582, + "learning_rate": 0.00028827713593533745, + "loss": 2.4368, + "step": 2924 + }, + { + "epoch": 1.8883150419625565, + "grad_norm": 1.0286387498996816, + "learning_rate": 0.0002882520847387647, + "loss": 2.0822, + "step": 2925 + }, + { + "epoch": 1.8889606197546804, + "grad_norm": 1.2292215986913904, + "learning_rate": 0.0002882270078949948, + "loss": 2.6742, + "step": 2926 + }, + { + "epoch": 1.8896061975468044, + "grad_norm": 1.0904954786444103, + "learning_rate": 0.00028820190540867986, + "loss": 2.1898, + "step": 2927 + }, + { + "epoch": 1.8902517753389283, + "grad_norm": 1.0845791718837763, + "learning_rate": 0.00028817677728447654, + "loss": 2.1658, + "step": 2928 + }, + { + "epoch": 1.8908973531310522, + "grad_norm": 1.1305799900514228, + "learning_rate": 0.0002881516235270465, + "loss": 2.5109, + "step": 2929 + }, + { + "epoch": 1.8915429309231762, + "grad_norm": 1.3249450961879565, + "learning_rate": 0.0002881264441410559, + "loss": 2.6356, + "step": 2930 + }, + { + "epoch": 1.8921885087153, + "grad_norm": 1.2123721922895296, + "learning_rate": 0.0002881012391311758, + "loss": 2.3428, + "step": 2931 + }, + { + "epoch": 1.892834086507424, + "grad_norm": 1.070860252706579, + "learning_rate": 0.0002880760085020821, + "loss": 2.1054, + "step": 2932 + }, + { + "epoch": 1.893479664299548, + "grad_norm": 1.157427364036869, + "learning_rate": 0.00028805075225845516, + "loss": 2.3412, + "step": 2933 + }, + { + "epoch": 1.894125242091672, + "grad_norm": 1.2529287013227983, + "learning_rate": 0.00028802547040498045, + "loss": 2.4664, + "step": 2934 + }, + { + "epoch": 1.894770819883796, + "grad_norm": 1.144445666501428, + "learning_rate": 0.00028800016294634794, + "loss": 1.9144, + "step": 2935 + }, + { + "epoch": 1.89541639767592, + "grad_norm": 1.216385435821551, + "learning_rate": 0.0002879748298872523, + "loss": 2.2066, + "step": 2936 + }, + { + "epoch": 1.896061975468044, + "grad_norm": 1.1210213529205886, + "learning_rate": 0.00028794947123239335, + "loss": 2.1397, + "step": 2937 + }, + { + "epoch": 1.8967075532601678, + "grad_norm": 1.018313718391025, + "learning_rate": 0.00028792408698647516, + "loss": 1.86, + "step": 2938 + }, + { + "epoch": 1.8973531310522918, + "grad_norm": 1.1000612524826467, + "learning_rate": 0.00028789867715420687, + "loss": 2.2632, + "step": 2939 + }, + { + "epoch": 1.897998708844416, + "grad_norm": 1.0656976562711131, + "learning_rate": 0.0002878732417403022, + "loss": 
2.3592, + "step": 2940 + }, + { + "epoch": 1.8986442866365398, + "grad_norm": 1.050910639606957, + "learning_rate": 0.0002878477807494798, + "loss": 2.168, + "step": 2941 + }, + { + "epoch": 1.8992898644286638, + "grad_norm": 1.2150891764779883, + "learning_rate": 0.00028782229418646287, + "loss": 2.3827, + "step": 2942 + }, + { + "epoch": 1.8999354422207877, + "grad_norm": 1.2012381826470142, + "learning_rate": 0.0002877967820559794, + "loss": 2.2984, + "step": 2943 + }, + { + "epoch": 1.9005810200129116, + "grad_norm": 1.1068695594672395, + "learning_rate": 0.0002877712443627622, + "loss": 2.3806, + "step": 2944 + }, + { + "epoch": 1.9012265978050356, + "grad_norm": 1.032783168354352, + "learning_rate": 0.00028774568111154884, + "loss": 2.1142, + "step": 2945 + }, + { + "epoch": 1.9018721755971595, + "grad_norm": 1.213331485031215, + "learning_rate": 0.0002877200923070814, + "loss": 2.4177, + "step": 2946 + }, + { + "epoch": 1.9025177533892834, + "grad_norm": 1.2931821516586928, + "learning_rate": 0.000287694477954107, + "loss": 2.6455, + "step": 2947 + }, + { + "epoch": 1.9031633311814073, + "grad_norm": 1.155643846364681, + "learning_rate": 0.00028766883805737733, + "loss": 2.4363, + "step": 2948 + }, + { + "epoch": 1.9038089089735313, + "grad_norm": 1.0592761860566633, + "learning_rate": 0.00028764317262164893, + "loss": 2.2708, + "step": 2949 + }, + { + "epoch": 1.9044544867656552, + "grad_norm": 1.0241120667636845, + "learning_rate": 0.00028761748165168283, + "loss": 2.1836, + "step": 2950 + }, + { + "epoch": 1.9051000645577791, + "grad_norm": 1.2361100431675516, + "learning_rate": 0.0002875917651522452, + "loss": 2.2685, + "step": 2951 + }, + { + "epoch": 1.905745642349903, + "grad_norm": 1.1262678004429933, + "learning_rate": 0.00028756602312810653, + "loss": 2.3479, + "step": 2952 + }, + { + "epoch": 1.906391220142027, + "grad_norm": 1.1387483770284206, + "learning_rate": 0.0002875402555840423, + "loss": 2.0448, + "step": 2953 + }, + { + "epoch": 1.907036797934151, + "grad_norm": 1.2458064520484071, + "learning_rate": 0.00028751446252483267, + "loss": 2.3175, + "step": 2954 + }, + { + "epoch": 1.9076823757262749, + "grad_norm": 1.1868503155621386, + "learning_rate": 0.00028748864395526245, + "loss": 2.1522, + "step": 2955 + }, + { + "epoch": 1.908327953518399, + "grad_norm": 1.1446130719594805, + "learning_rate": 0.0002874627998801214, + "loss": 1.8892, + "step": 2956 + }, + { + "epoch": 1.908973531310523, + "grad_norm": 1.1337292688948166, + "learning_rate": 0.0002874369303042037, + "loss": 2.1875, + "step": 2957 + }, + { + "epoch": 1.9096191091026469, + "grad_norm": 1.127673689256308, + "learning_rate": 0.00028741103523230856, + "loss": 2.1471, + "step": 2958 + }, + { + "epoch": 1.9102646868947708, + "grad_norm": 1.3620136516777641, + "learning_rate": 0.0002873851146692397, + "loss": 2.2238, + "step": 2959 + }, + { + "epoch": 1.9109102646868947, + "grad_norm": 1.1719025684432018, + "learning_rate": 0.00028735916861980567, + "loss": 2.2941, + "step": 2960 + }, + { + "epoch": 1.9115558424790189, + "grad_norm": 1.1819753918036486, + "learning_rate": 0.00028733319708881973, + "loss": 2.3317, + "step": 2961 + }, + { + "epoch": 1.9122014202711428, + "grad_norm": 1.2413816447259516, + "learning_rate": 0.0002873072000810999, + "loss": 1.8845, + "step": 2962 + }, + { + "epoch": 1.9128469980632667, + "grad_norm": 1.0398354418438247, + "learning_rate": 0.00028728117760146887, + "loss": 2.2107, + "step": 2963 + }, + { + "epoch": 1.9134925758553907, + "grad_norm": 1.0312026917016397, + 
"learning_rate": 0.0002872551296547541, + "loss": 1.7356, + "step": 2964 + }, + { + "epoch": 1.9141381536475146, + "grad_norm": 1.147258426776721, + "learning_rate": 0.00028722905624578766, + "loss": 2.2559, + "step": 2965 + }, + { + "epoch": 1.9147837314396385, + "grad_norm": 1.1589150170227114, + "learning_rate": 0.00028720295737940657, + "loss": 2.5954, + "step": 2966 + }, + { + "epoch": 1.9154293092317625, + "grad_norm": 1.075494272650331, + "learning_rate": 0.00028717683306045237, + "loss": 2.2426, + "step": 2967 + }, + { + "epoch": 1.9160748870238864, + "grad_norm": 1.022203164991847, + "learning_rate": 0.00028715068329377146, + "loss": 2.2369, + "step": 2968 + }, + { + "epoch": 1.9167204648160103, + "grad_norm": 1.1848211124567305, + "learning_rate": 0.00028712450808421477, + "loss": 2.2945, + "step": 2969 + }, + { + "epoch": 1.9173660426081343, + "grad_norm": 1.1403759318172446, + "learning_rate": 0.0002870983074366382, + "loss": 2.2911, + "step": 2970 + }, + { + "epoch": 1.9180116204002582, + "grad_norm": 1.086171806652548, + "learning_rate": 0.00028707208135590214, + "loss": 2.1974, + "step": 2971 + }, + { + "epoch": 1.9186571981923821, + "grad_norm": 1.1516220286274026, + "learning_rate": 0.0002870458298468719, + "loss": 1.7892, + "step": 2972 + }, + { + "epoch": 1.919302775984506, + "grad_norm": 1.1251715363681647, + "learning_rate": 0.00028701955291441733, + "loss": 2.3978, + "step": 2973 + }, + { + "epoch": 1.91994835377663, + "grad_norm": 1.0363182222267868, + "learning_rate": 0.0002869932505634131, + "loss": 2.2028, + "step": 2974 + }, + { + "epoch": 1.920593931568754, + "grad_norm": 1.176567677858251, + "learning_rate": 0.00028696692279873856, + "loss": 2.3695, + "step": 2975 + }, + { + "epoch": 1.9212395093608778, + "grad_norm": 1.3168580145185924, + "learning_rate": 0.00028694056962527784, + "loss": 2.3658, + "step": 2976 + }, + { + "epoch": 1.921885087153002, + "grad_norm": 1.0696718670612315, + "learning_rate": 0.0002869141910479196, + "loss": 2.4849, + "step": 2977 + }, + { + "epoch": 1.922530664945126, + "grad_norm": 1.0849269809101394, + "learning_rate": 0.00028688778707155744, + "loss": 2.1519, + "step": 2978 + }, + { + "epoch": 1.9231762427372499, + "grad_norm": 1.1674727951316735, + "learning_rate": 0.0002868613577010896, + "loss": 2.5143, + "step": 2979 + }, + { + "epoch": 1.9238218205293738, + "grad_norm": 0.9925051792420777, + "learning_rate": 0.00028683490294141887, + "loss": 2.2245, + "step": 2980 + }, + { + "epoch": 1.9244673983214977, + "grad_norm": 1.0414479828394834, + "learning_rate": 0.0002868084227974531, + "loss": 2.4881, + "step": 2981 + }, + { + "epoch": 1.9251129761136216, + "grad_norm": 1.1051337054887351, + "learning_rate": 0.0002867819172741044, + "loss": 2.0123, + "step": 2982 + }, + { + "epoch": 1.9257585539057458, + "grad_norm": 1.1577884612019569, + "learning_rate": 0.0002867553863762899, + "loss": 2.3429, + "step": 2983 + }, + { + "epoch": 1.9264041316978697, + "grad_norm": 1.2268290604838215, + "learning_rate": 0.0002867288301089314, + "loss": 2.5961, + "step": 2984 + }, + { + "epoch": 1.9270497094899937, + "grad_norm": 1.0257359122948604, + "learning_rate": 0.0002867022484769553, + "loss": 2.3165, + "step": 2985 + }, + { + "epoch": 1.9276952872821176, + "grad_norm": 1.201539567133626, + "learning_rate": 0.0002866756414852929, + "loss": 2.4522, + "step": 2986 + }, + { + "epoch": 1.9283408650742415, + "grad_norm": 1.153027359534764, + "learning_rate": 0.0002866490091388799, + "loss": 2.2606, + "step": 2987 + }, + { + "epoch": 
1.9289864428663654, + "grad_norm": 1.1468985538172438, + "learning_rate": 0.000286622351442657, + "loss": 2.3729, + "step": 2988 + }, + { + "epoch": 1.9296320206584894, + "grad_norm": 1.1993073160328356, + "learning_rate": 0.0002865956684015694, + "loss": 2.487, + "step": 2989 + }, + { + "epoch": 1.9302775984506133, + "grad_norm": 1.0674909209588364, + "learning_rate": 0.0002865689600205672, + "loss": 2.3172, + "step": 2990 + }, + { + "epoch": 1.9309231762427372, + "grad_norm": 1.1146305817401294, + "learning_rate": 0.00028654222630460487, + "loss": 2.206, + "step": 2991 + }, + { + "epoch": 1.9315687540348612, + "grad_norm": 1.0635445001135169, + "learning_rate": 0.00028651546725864203, + "loss": 2.298, + "step": 2992 + }, + { + "epoch": 1.932214331826985, + "grad_norm": 1.0144630875089162, + "learning_rate": 0.00028648868288764254, + "loss": 1.9193, + "step": 2993 + }, + { + "epoch": 1.932859909619109, + "grad_norm": 1.1141652480042117, + "learning_rate": 0.0002864618731965754, + "loss": 2.1932, + "step": 2994 + }, + { + "epoch": 1.933505487411233, + "grad_norm": 1.0716218511628568, + "learning_rate": 0.0002864350381904139, + "loss": 2.3586, + "step": 2995 + }, + { + "epoch": 1.934151065203357, + "grad_norm": 1.0741362618492778, + "learning_rate": 0.00028640817787413636, + "loss": 2.211, + "step": 2996 + }, + { + "epoch": 1.9347966429954808, + "grad_norm": 1.0857896532654348, + "learning_rate": 0.0002863812922527255, + "loss": 2.2731, + "step": 2997 + }, + { + "epoch": 1.9354422207876048, + "grad_norm": 1.1924598545916054, + "learning_rate": 0.000286354381331169, + "loss": 2.4941, + "step": 2998 + }, + { + "epoch": 1.936087798579729, + "grad_norm": 1.1452454385636621, + "learning_rate": 0.0002863274451144591, + "loss": 2.3139, + "step": 2999 + }, + { + "epoch": 1.9367333763718528, + "grad_norm": 1.1685324485294546, + "learning_rate": 0.00028630048360759263, + "loss": 2.2595, + "step": 3000 + }, + { + "epoch": 1.9373789541639768, + "grad_norm": 1.2483200453929753, + "learning_rate": 0.0002862734968155714, + "loss": 2.6949, + "step": 3001 + }, + { + "epoch": 1.9380245319561007, + "grad_norm": 1.1562613877807655, + "learning_rate": 0.0002862464847434017, + "loss": 2.3427, + "step": 3002 + }, + { + "epoch": 1.9386701097482246, + "grad_norm": 1.0778370333720093, + "learning_rate": 0.0002862194473960944, + "loss": 2.2324, + "step": 3003 + }, + { + "epoch": 1.9393156875403488, + "grad_norm": 1.0723292980883303, + "learning_rate": 0.00028619238477866536, + "loss": 2.324, + "step": 3004 + }, + { + "epoch": 1.9399612653324727, + "grad_norm": 0.9545959399401799, + "learning_rate": 0.00028616529689613496, + "loss": 2.1378, + "step": 3005 + }, + { + "epoch": 1.9406068431245966, + "grad_norm": 1.1260718291768788, + "learning_rate": 0.00028613818375352825, + "loss": 2.2864, + "step": 3006 + }, + { + "epoch": 1.9412524209167206, + "grad_norm": 1.1387532651324421, + "learning_rate": 0.000286111045355875, + "loss": 2.3066, + "step": 3007 + }, + { + "epoch": 1.9418979987088445, + "grad_norm": 1.0530926148228983, + "learning_rate": 0.0002860838817082097, + "loss": 2.2408, + "step": 3008 + }, + { + "epoch": 1.9425435765009684, + "grad_norm": 1.2565746795479509, + "learning_rate": 0.0002860566928155714, + "loss": 2.3599, + "step": 3009 + }, + { + "epoch": 1.9431891542930924, + "grad_norm": 1.1543587630428473, + "learning_rate": 0.000286029478683004, + "loss": 2.2028, + "step": 3010 + }, + { + "epoch": 1.9438347320852163, + "grad_norm": 1.0180064077341218, + "learning_rate": 0.000286002239315556, + "loss": 
1.9517, + "step": 3011 + }, + { + "epoch": 1.9444803098773402, + "grad_norm": 1.181395193231065, + "learning_rate": 0.0002859749747182806, + "loss": 2.0162, + "step": 3012 + }, + { + "epoch": 1.9451258876694641, + "grad_norm": 1.105598052381318, + "learning_rate": 0.00028594768489623564, + "loss": 2.2522, + "step": 3013 + }, + { + "epoch": 1.945771465461588, + "grad_norm": 1.1375949307823523, + "learning_rate": 0.0002859203698544837, + "loss": 2.2955, + "step": 3014 + }, + { + "epoch": 1.946417043253712, + "grad_norm": 1.165569164971814, + "learning_rate": 0.00028589302959809184, + "loss": 2.5289, + "step": 3015 + }, + { + "epoch": 1.947062621045836, + "grad_norm": 1.1724146386889154, + "learning_rate": 0.0002858656641321322, + "loss": 2.4498, + "step": 3016 + }, + { + "epoch": 1.9477081988379599, + "grad_norm": 1.0838895861053295, + "learning_rate": 0.0002858382734616812, + "loss": 2.1438, + "step": 3017 + }, + { + "epoch": 1.9483537766300838, + "grad_norm": 1.1029153959362006, + "learning_rate": 0.00028581085759182015, + "loss": 2.1672, + "step": 3018 + }, + { + "epoch": 1.9489993544222077, + "grad_norm": 1.2576843090166927, + "learning_rate": 0.000285783416527635, + "loss": 2.4278, + "step": 3019 + }, + { + "epoch": 1.9496449322143317, + "grad_norm": 1.0517400797908831, + "learning_rate": 0.0002857559502742163, + "loss": 2.2353, + "step": 3020 + }, + { + "epoch": 1.9502905100064558, + "grad_norm": 1.0872438050800828, + "learning_rate": 0.0002857284588366593, + "loss": 2.3099, + "step": 3021 + }, + { + "epoch": 1.9509360877985797, + "grad_norm": 1.1848242330804524, + "learning_rate": 0.0002857009422200641, + "loss": 1.9607, + "step": 3022 + }, + { + "epoch": 1.9515816655907037, + "grad_norm": 1.076736786415965, + "learning_rate": 0.0002856734004295352, + "loss": 1.9273, + "step": 3023 + }, + { + "epoch": 1.9522272433828276, + "grad_norm": 1.0724404861716152, + "learning_rate": 0.0002856458334701818, + "loss": 2.013, + "step": 3024 + }, + { + "epoch": 1.9528728211749515, + "grad_norm": 1.1792300842881638, + "learning_rate": 0.00028561824134711803, + "loss": 2.539, + "step": 3025 + }, + { + "epoch": 1.9535183989670757, + "grad_norm": 1.228762381656955, + "learning_rate": 0.0002855906240654624, + "loss": 2.518, + "step": 3026 + }, + { + "epoch": 1.9541639767591996, + "grad_norm": 1.2745114297037365, + "learning_rate": 0.00028556298163033827, + "loss": 2.4366, + "step": 3027 + }, + { + "epoch": 1.9548095545513235, + "grad_norm": 1.230943671513198, + "learning_rate": 0.0002855353140468736, + "loss": 2.2802, + "step": 3028 + }, + { + "epoch": 1.9554551323434475, + "grad_norm": 1.14047956863346, + "learning_rate": 0.00028550762132020097, + "loss": 2.3278, + "step": 3029 + }, + { + "epoch": 1.9561007101355714, + "grad_norm": 1.1792992590181022, + "learning_rate": 0.0002854799034554577, + "loss": 2.4081, + "step": 3030 + }, + { + "epoch": 1.9567462879276953, + "grad_norm": 1.133138855975102, + "learning_rate": 0.0002854521604577857, + "loss": 2.2958, + "step": 3031 + }, + { + "epoch": 1.9573918657198193, + "grad_norm": 1.1499861103981872, + "learning_rate": 0.0002854243923323316, + "loss": 2.063, + "step": 3032 + }, + { + "epoch": 1.9580374435119432, + "grad_norm": 1.2050983939273567, + "learning_rate": 0.0002853965990842467, + "loss": 2.3461, + "step": 3033 + }, + { + "epoch": 1.9586830213040671, + "grad_norm": 1.2122962620722864, + "learning_rate": 0.00028536878071868695, + "loss": 2.2564, + "step": 3034 + }, + { + "epoch": 1.959328599096191, + "grad_norm": 1.2289740341593192, + 
"learning_rate": 0.0002853409372408129, + "loss": 2.2876, + "step": 3035 + }, + { + "epoch": 1.959974176888315, + "grad_norm": 1.2082564558993552, + "learning_rate": 0.00028531306865578986, + "loss": 2.3451, + "step": 3036 + }, + { + "epoch": 1.960619754680439, + "grad_norm": 1.0980081447182668, + "learning_rate": 0.00028528517496878764, + "loss": 2.343, + "step": 3037 + }, + { + "epoch": 1.9612653324725629, + "grad_norm": 1.0711964579109345, + "learning_rate": 0.00028525725618498097, + "loss": 2.3148, + "step": 3038 + }, + { + "epoch": 1.9619109102646868, + "grad_norm": 1.2544102804649797, + "learning_rate": 0.0002852293123095489, + "loss": 2.3169, + "step": 3039 + }, + { + "epoch": 1.9625564880568107, + "grad_norm": 1.195847525941894, + "learning_rate": 0.0002852013433476754, + "loss": 2.2795, + "step": 3040 + }, + { + "epoch": 1.9632020658489346, + "grad_norm": 1.086639871462907, + "learning_rate": 0.000285173349304549, + "loss": 2.1721, + "step": 3041 + }, + { + "epoch": 1.9638476436410588, + "grad_norm": 1.1254667018906677, + "learning_rate": 0.0002851453301853628, + "loss": 2.3938, + "step": 3042 + }, + { + "epoch": 1.9644932214331827, + "grad_norm": 1.179653020666179, + "learning_rate": 0.0002851172859953148, + "loss": 2.2991, + "step": 3043 + }, + { + "epoch": 1.9651387992253067, + "grad_norm": 1.1799195370497018, + "learning_rate": 0.0002850892167396074, + "loss": 2.391, + "step": 3044 + }, + { + "epoch": 1.9657843770174306, + "grad_norm": 1.0777337870800787, + "learning_rate": 0.00028506112242344776, + "loss": 2.3393, + "step": 3045 + }, + { + "epoch": 1.9664299548095545, + "grad_norm": 1.1829135815959602, + "learning_rate": 0.00028503300305204757, + "loss": 2.488, + "step": 3046 + }, + { + "epoch": 1.9670755326016787, + "grad_norm": 1.2012453887346168, + "learning_rate": 0.00028500485863062347, + "loss": 2.4207, + "step": 3047 + }, + { + "epoch": 1.9677211103938026, + "grad_norm": 1.0690581852343528, + "learning_rate": 0.0002849766891643963, + "loss": 1.9899, + "step": 3048 + }, + { + "epoch": 1.9683666881859265, + "grad_norm": 1.0908696294418456, + "learning_rate": 0.00028494849465859196, + "loss": 2.0815, + "step": 3049 + }, + { + "epoch": 1.9690122659780505, + "grad_norm": 1.2425881201783284, + "learning_rate": 0.00028492027511844075, + "loss": 2.4153, + "step": 3050 + }, + { + "epoch": 1.9696578437701744, + "grad_norm": 1.099895748528611, + "learning_rate": 0.0002848920305491777, + "loss": 2.3912, + "step": 3051 + }, + { + "epoch": 1.9703034215622983, + "grad_norm": 1.167927339569317, + "learning_rate": 0.0002848637609560425, + "loss": 2.4042, + "step": 3052 + }, + { + "epoch": 1.9709489993544222, + "grad_norm": 1.1730285179991324, + "learning_rate": 0.0002848354663442794, + "loss": 2.3675, + "step": 3053 + }, + { + "epoch": 1.9715945771465462, + "grad_norm": 1.1427576146732616, + "learning_rate": 0.0002848071467191373, + "loss": 2.2058, + "step": 3054 + }, + { + "epoch": 1.97224015493867, + "grad_norm": 1.1473530001235086, + "learning_rate": 0.00028477880208587, + "loss": 2.2543, + "step": 3055 + }, + { + "epoch": 1.972885732730794, + "grad_norm": 1.1252380951970227, + "learning_rate": 0.00028475043244973554, + "loss": 2.4875, + "step": 3056 + }, + { + "epoch": 1.973531310522918, + "grad_norm": 1.1081800595744167, + "learning_rate": 0.0002847220378159968, + "loss": 2.1545, + "step": 3057 + }, + { + "epoch": 1.974176888315042, + "grad_norm": 1.242503418018209, + "learning_rate": 0.0002846936181899213, + "loss": 2.2473, + "step": 3058 + }, + { + "epoch": 1.9748224661071658, + 
"grad_norm": 1.1169087927878647, + "learning_rate": 0.0002846651735767812, + "loss": 2.2651, + "step": 3059 + }, + { + "epoch": 1.9754680438992898, + "grad_norm": 0.9401373480067702, + "learning_rate": 0.00028463670398185323, + "loss": 1.9742, + "step": 3060 + }, + { + "epoch": 1.9761136216914137, + "grad_norm": 1.3925949391895873, + "learning_rate": 0.00028460820941041885, + "loss": 2.346, + "step": 3061 + }, + { + "epoch": 1.9767591994835376, + "grad_norm": 1.1515360749768278, + "learning_rate": 0.000284579689867764, + "loss": 2.27, + "step": 3062 + }, + { + "epoch": 1.9774047772756616, + "grad_norm": 1.1457196401277845, + "learning_rate": 0.00028455114535917947, + "loss": 2.3152, + "step": 3063 + }, + { + "epoch": 1.9780503550677857, + "grad_norm": 1.0679201591546952, + "learning_rate": 0.0002845225758899605, + "loss": 2.1266, + "step": 3064 + }, + { + "epoch": 1.9786959328599096, + "grad_norm": 1.1153403241590125, + "learning_rate": 0.00028449398146540697, + "loss": 2.4296, + "step": 3065 + }, + { + "epoch": 1.9793415106520336, + "grad_norm": 1.22591227709418, + "learning_rate": 0.0002844653620908236, + "loss": 2.3029, + "step": 3066 + }, + { + "epoch": 1.9799870884441575, + "grad_norm": 1.1240096515208657, + "learning_rate": 0.0002844367177715194, + "loss": 2.2008, + "step": 3067 + }, + { + "epoch": 1.9806326662362814, + "grad_norm": 1.0974429657238798, + "learning_rate": 0.00028440804851280835, + "loss": 2.4276, + "step": 3068 + }, + { + "epoch": 1.9812782440284056, + "grad_norm": 1.0166655689506114, + "learning_rate": 0.0002843793543200088, + "loss": 2.4116, + "step": 3069 + }, + { + "epoch": 1.9819238218205295, + "grad_norm": 1.0424191271307315, + "learning_rate": 0.0002843506351984438, + "loss": 2.2573, + "step": 3070 + }, + { + "epoch": 1.9825693996126534, + "grad_norm": 1.1242089743540689, + "learning_rate": 0.0002843218911534411, + "loss": 2.3601, + "step": 3071 + }, + { + "epoch": 1.9832149774047774, + "grad_norm": 1.1317007551902833, + "learning_rate": 0.00028429312219033307, + "loss": 2.1925, + "step": 3072 + }, + { + "epoch": 1.9838605551969013, + "grad_norm": 1.167234577821905, + "learning_rate": 0.0002842643283144565, + "loss": 2.1677, + "step": 3073 + }, + { + "epoch": 1.9845061329890252, + "grad_norm": 1.0929503158340061, + "learning_rate": 0.0002842355095311531, + "loss": 2.2922, + "step": 3074 + }, + { + "epoch": 1.9851517107811492, + "grad_norm": 1.1299982708443088, + "learning_rate": 0.0002842066658457689, + "loss": 2.3637, + "step": 3075 + }, + { + "epoch": 1.985797288573273, + "grad_norm": 1.1627644353918785, + "learning_rate": 0.0002841777972636549, + "loss": 2.3995, + "step": 3076 + }, + { + "epoch": 1.986442866365397, + "grad_norm": 1.0571849548317673, + "learning_rate": 0.00028414890379016637, + "loss": 1.9342, + "step": 3077 + }, + { + "epoch": 1.987088444157521, + "grad_norm": 1.0367009359620978, + "learning_rate": 0.00028411998543066333, + "loss": 2.1741, + "step": 3078 + }, + { + "epoch": 1.9877340219496449, + "grad_norm": 1.2030901709969317, + "learning_rate": 0.00028409104219051057, + "loss": 2.1808, + "step": 3079 + }, + { + "epoch": 1.9883795997417688, + "grad_norm": 1.1673430111673913, + "learning_rate": 0.0002840620740750773, + "loss": 2.0904, + "step": 3080 + }, + { + "epoch": 1.9890251775338927, + "grad_norm": 1.075326609900047, + "learning_rate": 0.0002840330810897374, + "loss": 2.4522, + "step": 3081 + }, + { + "epoch": 1.9896707553260167, + "grad_norm": 1.1518187688966077, + "learning_rate": 0.00028400406323986937, + "loss": 1.8672, + "step": 
3082 + }, + { + "epoch": 1.9903163331181406, + "grad_norm": 1.0809447091458397, + "learning_rate": 0.0002839750205308563, + "loss": 2.385, + "step": 3083 + }, + { + "epoch": 1.9909619109102645, + "grad_norm": 1.0850567750578581, + "learning_rate": 0.0002839459529680859, + "loss": 2.3002, + "step": 3084 + }, + { + "epoch": 1.9916074887023887, + "grad_norm": 1.1054151863722272, + "learning_rate": 0.0002839168605569506, + "loss": 1.8944, + "step": 3085 + }, + { + "epoch": 1.9922530664945126, + "grad_norm": 1.254476824742911, + "learning_rate": 0.00028388774330284725, + "loss": 2.3871, + "step": 3086 + }, + { + "epoch": 1.9928986442866365, + "grad_norm": 1.1928484121352008, + "learning_rate": 0.0002838586012111775, + "loss": 2.4059, + "step": 3087 + }, + { + "epoch": 1.9935442220787605, + "grad_norm": 1.1948667173089573, + "learning_rate": 0.0002838294342873474, + "loss": 2.3412, + "step": 3088 + }, + { + "epoch": 1.9941897998708844, + "grad_norm": 1.1357850508502714, + "learning_rate": 0.00028380024253676776, + "loss": 2.3757, + "step": 3089 + }, + { + "epoch": 1.9948353776630086, + "grad_norm": 1.0936665200813112, + "learning_rate": 0.00028377102596485396, + "loss": 2.2676, + "step": 3090 + }, + { + "epoch": 1.9954809554551325, + "grad_norm": 1.1071052104747887, + "learning_rate": 0.00028374178457702597, + "loss": 2.163, + "step": 3091 + }, + { + "epoch": 1.9961265332472564, + "grad_norm": 1.1770521425055223, + "learning_rate": 0.00028371251837870843, + "loss": 2.328, + "step": 3092 + }, + { + "epoch": 1.9967721110393803, + "grad_norm": 1.130903887212711, + "learning_rate": 0.0002836832273753304, + "loss": 2.3586, + "step": 3093 + }, + { + "epoch": 1.9974176888315043, + "grad_norm": 1.1259324691344612, + "learning_rate": 0.00028365391157232574, + "loss": 2.3501, + "step": 3094 + }, + { + "epoch": 1.9980632666236282, + "grad_norm": 1.1228053493093728, + "learning_rate": 0.00028362457097513285, + "loss": 2.5668, + "step": 3095 + }, + { + "epoch": 1.9987088444157521, + "grad_norm": 1.0632354794799093, + "learning_rate": 0.0002835952055891947, + "loss": 2.3075, + "step": 3096 + }, + { + "epoch": 1.999354422207876, + "grad_norm": 1.0901706461765477, + "learning_rate": 0.0002835658154199589, + "loss": 2.1795, + "step": 3097 + }, + { + "epoch": 2.0, + "grad_norm": 1.1148302250150586, + "learning_rate": 0.00028353640047287755, + "loss": 2.214, + "step": 3098 + }, + { + "epoch": 2.0, + "eval_loss": 2.2155704498291016, + "eval_runtime": 58.3455, + "eval_samples_per_second": 5.947, + "eval_steps_per_second": 5.947, + "step": 3098 + }, + { + "epoch": 2.000645577792124, + "grad_norm": 1.1194032682556765, + "learning_rate": 0.0002835069607534075, + "loss": 2.0529, + "step": 3099 + }, + { + "epoch": 2.001291155584248, + "grad_norm": 1.1550209355315553, + "learning_rate": 0.00028347749626701015, + "loss": 2.2025, + "step": 3100 + }, + { + "epoch": 2.001936733376372, + "grad_norm": 1.078946233968094, + "learning_rate": 0.0002834480070191513, + "loss": 2.0026, + "step": 3101 + }, + { + "epoch": 2.0025823111684957, + "grad_norm": 1.0625440324399633, + "learning_rate": 0.0002834184930153017, + "loss": 2.0851, + "step": 3102 + }, + { + "epoch": 2.0032278889606197, + "grad_norm": 1.0722145751954502, + "learning_rate": 0.00028338895426093647, + "loss": 1.6073, + "step": 3103 + }, + { + "epoch": 2.0038734667527436, + "grad_norm": 1.1280925069070333, + "learning_rate": 0.00028335939076153533, + "loss": 1.855, + "step": 3104 + }, + { + "epoch": 2.0045190445448675, + "grad_norm": 1.31649971943016, + "learning_rate": 
0.0002833298025225826, + "loss": 1.5405, + "step": 3105 + }, + { + "epoch": 2.0051646223369914, + "grad_norm": 1.0739972545399732, + "learning_rate": 0.0002833001895495672, + "loss": 1.9429, + "step": 3106 + }, + { + "epoch": 2.0058102001291154, + "grad_norm": 1.2162695222133806, + "learning_rate": 0.00028327055184798264, + "loss": 2.0325, + "step": 3107 + }, + { + "epoch": 2.0064557779212393, + "grad_norm": 1.3580690508648814, + "learning_rate": 0.00028324088942332707, + "loss": 2.4162, + "step": 3108 + }, + { + "epoch": 2.0071013557133637, + "grad_norm": 1.2082787672456785, + "learning_rate": 0.0002832112022811031, + "loss": 2.215, + "step": 3109 + }, + { + "epoch": 2.0077469335054876, + "grad_norm": 1.2195495749958802, + "learning_rate": 0.0002831814904268181, + "loss": 2.1913, + "step": 3110 + }, + { + "epoch": 2.0083925112976115, + "grad_norm": 1.269812410343274, + "learning_rate": 0.00028315175386598384, + "loss": 2.3855, + "step": 3111 + }, + { + "epoch": 2.0090380890897355, + "grad_norm": 1.063347551030408, + "learning_rate": 0.0002831219926041168, + "loss": 2.1321, + "step": 3112 + }, + { + "epoch": 2.0096836668818594, + "grad_norm": 1.1138843928809996, + "learning_rate": 0.00028309220664673806, + "loss": 2.1392, + "step": 3113 + }, + { + "epoch": 2.0103292446739833, + "grad_norm": 1.0542455263362187, + "learning_rate": 0.00028306239599937315, + "loss": 2.0573, + "step": 3114 + }, + { + "epoch": 2.0109748224661073, + "grad_norm": 1.089995464647484, + "learning_rate": 0.0002830325606675522, + "loss": 1.9671, + "step": 3115 + }, + { + "epoch": 2.011620400258231, + "grad_norm": 1.228935086020962, + "learning_rate": 0.0002830027006568101, + "loss": 2.4506, + "step": 3116 + }, + { + "epoch": 2.012265978050355, + "grad_norm": 1.0666615103389532, + "learning_rate": 0.00028297281597268616, + "loss": 2.0866, + "step": 3117 + }, + { + "epoch": 2.012911555842479, + "grad_norm": 1.282469549955992, + "learning_rate": 0.00028294290662072424, + "loss": 2.2855, + "step": 3118 + }, + { + "epoch": 2.013557133634603, + "grad_norm": 1.1198335346854371, + "learning_rate": 0.0002829129726064729, + "loss": 1.9003, + "step": 3119 + }, + { + "epoch": 2.014202711426727, + "grad_norm": 1.1245610503513013, + "learning_rate": 0.0002828830139354852, + "loss": 1.9849, + "step": 3120 + }, + { + "epoch": 2.014848289218851, + "grad_norm": 0.9993058330620764, + "learning_rate": 0.0002828530306133187, + "loss": 1.851, + "step": 3121 + }, + { + "epoch": 2.0154938670109748, + "grad_norm": 1.07867496716578, + "learning_rate": 0.0002828230226455357, + "loss": 1.8292, + "step": 3122 + }, + { + "epoch": 2.0161394448030987, + "grad_norm": 1.1280688534215433, + "learning_rate": 0.000282792990037703, + "loss": 2.2153, + "step": 3123 + }, + { + "epoch": 2.0167850225952226, + "grad_norm": 1.1283508731435925, + "learning_rate": 0.00028276293279539186, + "loss": 2.2889, + "step": 3124 + }, + { + "epoch": 2.0174306003873466, + "grad_norm": 1.1405155038479624, + "learning_rate": 0.0002827328509241784, + "loss": 2.0702, + "step": 3125 + }, + { + "epoch": 2.0180761781794705, + "grad_norm": 1.1898482970948858, + "learning_rate": 0.00028270274442964295, + "loss": 2.1229, + "step": 3126 + }, + { + "epoch": 2.0187217559715944, + "grad_norm": 1.117322805466497, + "learning_rate": 0.0002826726133173706, + "loss": 2.0798, + "step": 3127 + }, + { + "epoch": 2.0193673337637184, + "grad_norm": 1.0415159260265157, + "learning_rate": 0.0002826424575929511, + "loss": 2.0224, + "step": 3128 + }, + { + "epoch": 2.0200129115558423, + "grad_norm": 
1.1945522741568015, + "learning_rate": 0.00028261227726197847, + "loss": 2.293, + "step": 3129 + }, + { + "epoch": 2.020658489347966, + "grad_norm": 1.1282489262107114, + "learning_rate": 0.00028258207233005164, + "loss": 2.136, + "step": 3130 + }, + { + "epoch": 2.0213040671400906, + "grad_norm": 1.0323402438780278, + "learning_rate": 0.00028255184280277383, + "loss": 1.7312, + "step": 3131 + }, + { + "epoch": 2.0219496449322145, + "grad_norm": 1.036276053402538, + "learning_rate": 0.00028252158868575296, + "loss": 2.0574, + "step": 3132 + }, + { + "epoch": 2.0225952227243384, + "grad_norm": 1.2581383756614666, + "learning_rate": 0.0002824913099846015, + "loss": 2.1878, + "step": 3133 + }, + { + "epoch": 2.0232408005164624, + "grad_norm": 1.111201789462494, + "learning_rate": 0.00028246100670493654, + "loss": 2.1243, + "step": 3134 + }, + { + "epoch": 2.0238863783085863, + "grad_norm": 1.0644240374583447, + "learning_rate": 0.0002824306788523795, + "loss": 1.9174, + "step": 3135 + }, + { + "epoch": 2.0245319561007102, + "grad_norm": 1.1871662877910063, + "learning_rate": 0.0002824003264325566, + "loss": 2.2901, + "step": 3136 + }, + { + "epoch": 2.025177533892834, + "grad_norm": 1.162584709285058, + "learning_rate": 0.00028236994945109845, + "loss": 2.0269, + "step": 3137 + }, + { + "epoch": 2.025823111684958, + "grad_norm": 1.1476789448993086, + "learning_rate": 0.0002823395479136404, + "loss": 2.0284, + "step": 3138 + }, + { + "epoch": 2.026468689477082, + "grad_norm": 1.1693790422995498, + "learning_rate": 0.00028230912182582223, + "loss": 2.2089, + "step": 3139 + }, + { + "epoch": 2.027114267269206, + "grad_norm": 1.158202626703693, + "learning_rate": 0.0002822786711932882, + "loss": 1.9891, + "step": 3140 + }, + { + "epoch": 2.02775984506133, + "grad_norm": 0.9896986350170325, + "learning_rate": 0.00028224819602168735, + "loss": 2.0135, + "step": 3141 + }, + { + "epoch": 2.028405422853454, + "grad_norm": 1.1630226060328412, + "learning_rate": 0.0002822176963166731, + "loss": 2.2425, + "step": 3142 + }, + { + "epoch": 2.0290510006455778, + "grad_norm": 1.0768609426603017, + "learning_rate": 0.0002821871720839034, + "loss": 1.7983, + "step": 3143 + }, + { + "epoch": 2.0296965784377017, + "grad_norm": 1.2334963347260879, + "learning_rate": 0.0002821566233290408, + "loss": 2.2401, + "step": 3144 + }, + { + "epoch": 2.0303421562298256, + "grad_norm": 1.204882459572904, + "learning_rate": 0.00028212605005775254, + "loss": 1.821, + "step": 3145 + }, + { + "epoch": 2.0309877340219495, + "grad_norm": 1.0972088204615782, + "learning_rate": 0.0002820954522757102, + "loss": 2.0648, + "step": 3146 + }, + { + "epoch": 2.0316333118140735, + "grad_norm": 1.2152766660771357, + "learning_rate": 0.00028206482998858996, + "loss": 2.1089, + "step": 3147 + }, + { + "epoch": 2.0322788896061974, + "grad_norm": 1.215596866309249, + "learning_rate": 0.00028203418320207267, + "loss": 2.3905, + "step": 3148 + }, + { + "epoch": 2.0329244673983213, + "grad_norm": 1.1258379783320664, + "learning_rate": 0.0002820035119218435, + "loss": 2.2429, + "step": 3149 + }, + { + "epoch": 2.0335700451904453, + "grad_norm": 1.122442450954909, + "learning_rate": 0.00028197281615359237, + "loss": 2.036, + "step": 3150 + }, + { + "epoch": 2.034215622982569, + "grad_norm": 1.1626142471660628, + "learning_rate": 0.00028194209590301364, + "loss": 2.0476, + "step": 3151 + }, + { + "epoch": 2.0348612007746936, + "grad_norm": 1.249902565939622, + "learning_rate": 0.0002819113511758063, + "loss": 2.3245, + "step": 3152 + }, + { + 
"epoch": 2.0355067785668175, + "grad_norm": 1.4362641064189605, + "learning_rate": 0.00028188058197767366, + "loss": 1.408, + "step": 3153 + }, + { + "epoch": 2.0361523563589414, + "grad_norm": 1.129085322212487, + "learning_rate": 0.00028184978831432384, + "loss": 2.1005, + "step": 3154 + }, + { + "epoch": 2.0367979341510654, + "grad_norm": 1.149632804184819, + "learning_rate": 0.0002818189701914694, + "loss": 2.1933, + "step": 3155 + }, + { + "epoch": 2.0374435119431893, + "grad_norm": 1.1628875780473356, + "learning_rate": 0.0002817881276148274, + "loss": 2.1144, + "step": 3156 + }, + { + "epoch": 2.038089089735313, + "grad_norm": 1.2310013019724748, + "learning_rate": 0.0002817572605901194, + "loss": 2.3277, + "step": 3157 + }, + { + "epoch": 2.038734667527437, + "grad_norm": 1.1696537670337261, + "learning_rate": 0.0002817263691230716, + "loss": 1.9989, + "step": 3158 + }, + { + "epoch": 2.039380245319561, + "grad_norm": 1.1295182246352704, + "learning_rate": 0.00028169545321941474, + "loss": 1.791, + "step": 3159 + }, + { + "epoch": 2.040025823111685, + "grad_norm": 1.2944477594145323, + "learning_rate": 0.00028166451288488397, + "loss": 2.1167, + "step": 3160 + }, + { + "epoch": 2.040671400903809, + "grad_norm": 1.1652123741714826, + "learning_rate": 0.00028163354812521903, + "loss": 1.9281, + "step": 3161 + }, + { + "epoch": 2.041316978695933, + "grad_norm": 1.2031890540133279, + "learning_rate": 0.0002816025589461643, + "loss": 2.1068, + "step": 3162 + }, + { + "epoch": 2.041962556488057, + "grad_norm": 1.181607637708003, + "learning_rate": 0.0002815715453534685, + "loss": 2.3408, + "step": 3163 + }, + { + "epoch": 2.0426081342801807, + "grad_norm": 1.1454045904302923, + "learning_rate": 0.00028154050735288496, + "loss": 2.0486, + "step": 3164 + }, + { + "epoch": 2.0432537120723047, + "grad_norm": 1.0928712900142572, + "learning_rate": 0.00028150944495017164, + "loss": 1.9192, + "step": 3165 + }, + { + "epoch": 2.0438992898644286, + "grad_norm": 1.029185214595003, + "learning_rate": 0.0002814783581510909, + "loss": 2.0706, + "step": 3166 + }, + { + "epoch": 2.0445448676565525, + "grad_norm": 1.14508403064423, + "learning_rate": 0.00028144724696140965, + "loss": 2.0103, + "step": 3167 + }, + { + "epoch": 2.0451904454486765, + "grad_norm": 1.097031011184657, + "learning_rate": 0.0002814161113868993, + "loss": 1.9854, + "step": 3168 + }, + { + "epoch": 2.0458360232408004, + "grad_norm": 1.1095326085934896, + "learning_rate": 0.00028138495143333595, + "loss": 2.2003, + "step": 3169 + }, + { + "epoch": 2.0464816010329243, + "grad_norm": 1.2661611128913117, + "learning_rate": 0.0002813537671065, + "loss": 2.3896, + "step": 3170 + }, + { + "epoch": 2.0471271788250482, + "grad_norm": 1.0940239365550704, + "learning_rate": 0.0002813225584121764, + "loss": 2.2456, + "step": 3171 + }, + { + "epoch": 2.047772756617172, + "grad_norm": 1.131770882862933, + "learning_rate": 0.00028129132535615486, + "loss": 2.288, + "step": 3172 + }, + { + "epoch": 2.048418334409296, + "grad_norm": 1.1596260942936694, + "learning_rate": 0.00028126006794422926, + "loss": 2.0126, + "step": 3173 + }, + { + "epoch": 2.0490639122014205, + "grad_norm": 1.2224085970703171, + "learning_rate": 0.00028122878618219835, + "loss": 2.2663, + "step": 3174 + }, + { + "epoch": 2.0497094899935444, + "grad_norm": 1.338036959706292, + "learning_rate": 0.0002811974800758651, + "loss": 1.9521, + "step": 3175 + }, + { + "epoch": 2.0503550677856683, + "grad_norm": 1.2546518046022113, + "learning_rate": 0.0002811661496310371, + "loss": 
2.2323, + "step": 3176 + }, + { + "epoch": 2.0510006455777923, + "grad_norm": 1.1865065170847486, + "learning_rate": 0.0002811347948535266, + "loss": 2.0341, + "step": 3177 + }, + { + "epoch": 2.051646223369916, + "grad_norm": 1.058982133304418, + "learning_rate": 0.00028110341574915005, + "loss": 1.5964, + "step": 3178 + }, + { + "epoch": 2.05229180116204, + "grad_norm": 1.2035634409391116, + "learning_rate": 0.0002810720123237288, + "loss": 2.1795, + "step": 3179 + }, + { + "epoch": 2.052937378954164, + "grad_norm": 1.1296923876632519, + "learning_rate": 0.00028104058458308835, + "loss": 2.0476, + "step": 3180 + }, + { + "epoch": 2.053582956746288, + "grad_norm": 1.220549953849265, + "learning_rate": 0.000281009132533059, + "loss": 2.1034, + "step": 3181 + }, + { + "epoch": 2.054228534538412, + "grad_norm": 1.2052110957414477, + "learning_rate": 0.00028097765617947536, + "loss": 2.1349, + "step": 3182 + }, + { + "epoch": 2.054874112330536, + "grad_norm": 1.162665859100324, + "learning_rate": 0.00028094615552817656, + "loss": 2.0871, + "step": 3183 + }, + { + "epoch": 2.05551969012266, + "grad_norm": 1.1853846197348505, + "learning_rate": 0.00028091463058500647, + "loss": 1.6563, + "step": 3184 + }, + { + "epoch": 2.0561652679147837, + "grad_norm": 1.0937254896330668, + "learning_rate": 0.0002808830813558132, + "loss": 1.9332, + "step": 3185 + }, + { + "epoch": 2.0568108457069076, + "grad_norm": 1.1327028931370096, + "learning_rate": 0.0002808515078464494, + "loss": 2.0703, + "step": 3186 + }, + { + "epoch": 2.0574564234990316, + "grad_norm": 1.192236240126071, + "learning_rate": 0.0002808199100627723, + "loss": 2.2906, + "step": 3187 + }, + { + "epoch": 2.0581020012911555, + "grad_norm": 1.174831344400014, + "learning_rate": 0.00028078828801064373, + "loss": 2.0044, + "step": 3188 + }, + { + "epoch": 2.0587475790832794, + "grad_norm": 1.1281286818367149, + "learning_rate": 0.0002807566416959298, + "loss": 2.212, + "step": 3189 + }, + { + "epoch": 2.0593931568754034, + "grad_norm": 1.1988779123547109, + "learning_rate": 0.0002807249711245013, + "loss": 2.0925, + "step": 3190 + }, + { + "epoch": 2.0600387346675273, + "grad_norm": 1.1442907201295447, + "learning_rate": 0.00028069327630223336, + "loss": 1.7807, + "step": 3191 + }, + { + "epoch": 2.0606843124596512, + "grad_norm": 1.1097285602473645, + "learning_rate": 0.0002806615572350058, + "loss": 2.0434, + "step": 3192 + }, + { + "epoch": 2.061329890251775, + "grad_norm": 1.2429380942058723, + "learning_rate": 0.0002806298139287027, + "loss": 2.2367, + "step": 3193 + }, + { + "epoch": 2.061975468043899, + "grad_norm": 1.0537440067163673, + "learning_rate": 0.00028059804638921296, + "loss": 1.7026, + "step": 3194 + }, + { + "epoch": 2.062621045836023, + "grad_norm": 1.0852668525127807, + "learning_rate": 0.00028056625462242964, + "loss": 1.8976, + "step": 3195 + }, + { + "epoch": 2.0632666236281474, + "grad_norm": 1.140597677609043, + "learning_rate": 0.00028053443863425054, + "loss": 2.0428, + "step": 3196 + }, + { + "epoch": 2.0639122014202713, + "grad_norm": 1.1336321674758492, + "learning_rate": 0.0002805025984305778, + "loss": 1.9283, + "step": 3197 + }, + { + "epoch": 2.0645577792123952, + "grad_norm": 1.2708294591918246, + "learning_rate": 0.00028047073401731807, + "loss": 1.9951, + "step": 3198 + }, + { + "epoch": 2.065203357004519, + "grad_norm": 1.2237242750040556, + "learning_rate": 0.0002804388454003826, + "loss": 2.0067, + "step": 3199 + }, + { + "epoch": 2.065848934796643, + "grad_norm": 1.1731999078816306, + 
"learning_rate": 0.00028040693258568703, + "loss": 2.1827, + "step": 3200 + }, + { + "epoch": 2.066494512588767, + "grad_norm": 1.2469544508470554, + "learning_rate": 0.0002803749955791515, + "loss": 2.1263, + "step": 3201 + }, + { + "epoch": 2.067140090380891, + "grad_norm": 1.1286120914505864, + "learning_rate": 0.0002803430343867007, + "loss": 2.0444, + "step": 3202 + }, + { + "epoch": 2.067785668173015, + "grad_norm": 1.1476888084663808, + "learning_rate": 0.0002803110490142638, + "loss": 1.9841, + "step": 3203 + }, + { + "epoch": 2.068431245965139, + "grad_norm": 1.2220217465301175, + "learning_rate": 0.00028027903946777435, + "loss": 2.2339, + "step": 3204 + }, + { + "epoch": 2.0690768237572628, + "grad_norm": 1.186519917451983, + "learning_rate": 0.0002802470057531704, + "loss": 2.1808, + "step": 3205 + }, + { + "epoch": 2.0697224015493867, + "grad_norm": 1.187358785037078, + "learning_rate": 0.0002802149478763946, + "loss": 2.1142, + "step": 3206 + }, + { + "epoch": 2.0703679793415106, + "grad_norm": 1.134297325192955, + "learning_rate": 0.0002801828658433941, + "loss": 2.0586, + "step": 3207 + }, + { + "epoch": 2.0710135571336346, + "grad_norm": 1.0958572706820486, + "learning_rate": 0.0002801507596601203, + "loss": 2.0281, + "step": 3208 + }, + { + "epoch": 2.0716591349257585, + "grad_norm": 1.1094471862771458, + "learning_rate": 0.00028011862933252933, + "loss": 2.2383, + "step": 3209 + }, + { + "epoch": 2.0723047127178824, + "grad_norm": 1.1545580831102398, + "learning_rate": 0.0002800864748665817, + "loss": 1.8723, + "step": 3210 + }, + { + "epoch": 2.0729502905100063, + "grad_norm": 1.0917524820828302, + "learning_rate": 0.0002800542962682423, + "loss": 2.005, + "step": 3211 + }, + { + "epoch": 2.0735958683021303, + "grad_norm": 1.1524340288228065, + "learning_rate": 0.00028002209354348074, + "loss": 2.0024, + "step": 3212 + }, + { + "epoch": 2.074241446094254, + "grad_norm": 1.2361362194423546, + "learning_rate": 0.00027998986669827084, + "loss": 2.2399, + "step": 3213 + }, + { + "epoch": 2.074887023886378, + "grad_norm": 1.2163054423954502, + "learning_rate": 0.0002799576157385911, + "loss": 2.1524, + "step": 3214 + }, + { + "epoch": 2.075532601678502, + "grad_norm": 1.2159952623308066, + "learning_rate": 0.0002799253406704243, + "loss": 2.0946, + "step": 3215 + }, + { + "epoch": 2.0761781794706264, + "grad_norm": 1.228185715139549, + "learning_rate": 0.0002798930414997579, + "loss": 2.3777, + "step": 3216 + }, + { + "epoch": 2.0768237572627504, + "grad_norm": 1.1409248183405343, + "learning_rate": 0.00027986071823258364, + "loss": 2.0871, + "step": 3217 + }, + { + "epoch": 2.0774693350548743, + "grad_norm": 1.1286821598089884, + "learning_rate": 0.000279828370874898, + "loss": 2.2183, + "step": 3218 + }, + { + "epoch": 2.0781149128469982, + "grad_norm": 1.2352543318552511, + "learning_rate": 0.00027979599943270153, + "loss": 2.1336, + "step": 3219 + }, + { + "epoch": 2.078760490639122, + "grad_norm": 1.2699299659345022, + "learning_rate": 0.0002797636039119996, + "loss": 2.3388, + "step": 3220 + }, + { + "epoch": 2.079406068431246, + "grad_norm": 1.1538424673923275, + "learning_rate": 0.0002797311843188019, + "loss": 1.9666, + "step": 3221 + }, + { + "epoch": 2.08005164622337, + "grad_norm": 1.1065738973069075, + "learning_rate": 0.00027969874065912257, + "loss": 1.6771, + "step": 3222 + }, + { + "epoch": 2.080697224015494, + "grad_norm": 1.146734600770908, + "learning_rate": 0.0002796662729389802, + "loss": 2.1589, + "step": 3223 + }, + { + "epoch": 2.081342801807618, + 
"grad_norm": 1.040699967010613, + "learning_rate": 0.00027963378116439803, + "loss": 2.0019, + "step": 3224 + }, + { + "epoch": 2.081988379599742, + "grad_norm": 1.0354934968538938, + "learning_rate": 0.0002796012653414035, + "loss": 1.8379, + "step": 3225 + }, + { + "epoch": 2.0826339573918657, + "grad_norm": 1.2598876730613904, + "learning_rate": 0.0002795687254760287, + "loss": 2.3922, + "step": 3226 + }, + { + "epoch": 2.0832795351839897, + "grad_norm": 1.3502364330276064, + "learning_rate": 0.00027953616157431007, + "loss": 2.1508, + "step": 3227 + }, + { + "epoch": 2.0839251129761136, + "grad_norm": 0.951784214965775, + "learning_rate": 0.0002795035736422886, + "loss": 1.4616, + "step": 3228 + }, + { + "epoch": 2.0845706907682375, + "grad_norm": 1.161060401924211, + "learning_rate": 0.0002794709616860096, + "loss": 2.1007, + "step": 3229 + }, + { + "epoch": 2.0852162685603615, + "grad_norm": 1.2724687443835518, + "learning_rate": 0.000279438325711523, + "loss": 1.8244, + "step": 3230 + }, + { + "epoch": 2.0858618463524854, + "grad_norm": 1.1931431348096617, + "learning_rate": 0.0002794056657248831, + "loss": 2.1534, + "step": 3231 + }, + { + "epoch": 2.0865074241446093, + "grad_norm": 1.2980576355445503, + "learning_rate": 0.00027937298173214865, + "loss": 2.1589, + "step": 3232 + }, + { + "epoch": 2.0871530019367333, + "grad_norm": 1.2772968388429045, + "learning_rate": 0.00027934027373938293, + "loss": 2.1142, + "step": 3233 + }, + { + "epoch": 2.087798579728857, + "grad_norm": 1.1590634335862364, + "learning_rate": 0.00027930754175265355, + "loss": 2.1926, + "step": 3234 + }, + { + "epoch": 2.088444157520981, + "grad_norm": 1.176390807066354, + "learning_rate": 0.00027927478577803263, + "loss": 2.241, + "step": 3235 + }, + { + "epoch": 2.089089735313105, + "grad_norm": 1.158664391273772, + "learning_rate": 0.00027924200582159675, + "loss": 2.149, + "step": 3236 + }, + { + "epoch": 2.089735313105229, + "grad_norm": 1.117969913053552, + "learning_rate": 0.000279209201889427, + "loss": 2.0743, + "step": 3237 + }, + { + "epoch": 2.090380890897353, + "grad_norm": 1.308628514377336, + "learning_rate": 0.00027917637398760874, + "loss": 2.0534, + "step": 3238 + }, + { + "epoch": 2.0910264686894773, + "grad_norm": 1.263624994794256, + "learning_rate": 0.00027914352212223193, + "loss": 2.2427, + "step": 3239 + }, + { + "epoch": 2.091672046481601, + "grad_norm": 1.2083269282024407, + "learning_rate": 0.000279110646299391, + "loss": 2.3754, + "step": 3240 + }, + { + "epoch": 2.092317624273725, + "grad_norm": 1.115416242491856, + "learning_rate": 0.0002790777465251847, + "loss": 1.8807, + "step": 3241 + }, + { + "epoch": 2.092963202065849, + "grad_norm": 0.9913277365558808, + "learning_rate": 0.0002790448228057162, + "loss": 1.7256, + "step": 3242 + }, + { + "epoch": 2.093608779857973, + "grad_norm": 1.2897139417446004, + "learning_rate": 0.00027901187514709337, + "loss": 2.1782, + "step": 3243 + }, + { + "epoch": 2.094254357650097, + "grad_norm": 1.1692674095742353, + "learning_rate": 0.00027897890355542816, + "loss": 1.9159, + "step": 3244 + }, + { + "epoch": 2.094899935442221, + "grad_norm": 1.240992026478898, + "learning_rate": 0.00027894590803683724, + "loss": 2.21, + "step": 3245 + }, + { + "epoch": 2.095545513234345, + "grad_norm": 1.0075347200160578, + "learning_rate": 0.00027891288859744164, + "loss": 1.8307, + "step": 3246 + }, + { + "epoch": 2.0961910910264687, + "grad_norm": 1.176878244479167, + "learning_rate": 0.0002788798452433668, + "loss": 1.9703, + "step": 3247 + }, + { + 
"epoch": 2.0968366688185927, + "grad_norm": 1.1483426437857318, + "learning_rate": 0.0002788467779807425, + "loss": 1.9856, + "step": 3248 + }, + { + "epoch": 2.0974822466107166, + "grad_norm": 1.1554264249840525, + "learning_rate": 0.0002788136868157032, + "loss": 2.1345, + "step": 3249 + }, + { + "epoch": 2.0981278244028405, + "grad_norm": 1.2018963344120885, + "learning_rate": 0.0002787805717543876, + "loss": 2.2496, + "step": 3250 + }, + { + "epoch": 2.0987734021949644, + "grad_norm": 1.2655144897163892, + "learning_rate": 0.00027874743280293887, + "loss": 2.2226, + "step": 3251 + }, + { + "epoch": 2.0994189799870884, + "grad_norm": 1.2720437393315829, + "learning_rate": 0.0002787142699675046, + "loss": 1.9776, + "step": 3252 + }, + { + "epoch": 2.1000645577792123, + "grad_norm": 1.0685078388172866, + "learning_rate": 0.00027868108325423697, + "loss": 1.9985, + "step": 3253 + }, + { + "epoch": 2.1007101355713362, + "grad_norm": 1.2214763158365336, + "learning_rate": 0.00027864787266929233, + "loss": 2.0488, + "step": 3254 + }, + { + "epoch": 2.10135571336346, + "grad_norm": 1.115301804177332, + "learning_rate": 0.0002786146382188316, + "loss": 1.671, + "step": 3255 + }, + { + "epoch": 2.102001291155584, + "grad_norm": 1.159533871108533, + "learning_rate": 0.00027858137990902017, + "loss": 1.8714, + "step": 3256 + }, + { + "epoch": 2.102646868947708, + "grad_norm": 1.1420451902314173, + "learning_rate": 0.0002785480977460278, + "loss": 2.0469, + "step": 3257 + }, + { + "epoch": 2.103292446739832, + "grad_norm": 1.1553574770943649, + "learning_rate": 0.0002785147917360286, + "loss": 1.7893, + "step": 3258 + }, + { + "epoch": 2.103938024531956, + "grad_norm": 1.3232241002514675, + "learning_rate": 0.00027848146188520133, + "loss": 2.183, + "step": 3259 + }, + { + "epoch": 2.1045836023240803, + "grad_norm": 1.2803808843987865, + "learning_rate": 0.0002784481081997289, + "loss": 2.0213, + "step": 3260 + }, + { + "epoch": 2.105229180116204, + "grad_norm": 1.1755109464911584, + "learning_rate": 0.0002784147306857988, + "loss": 1.8886, + "step": 3261 + }, + { + "epoch": 2.105874757908328, + "grad_norm": 1.1757470796582041, + "learning_rate": 0.00027838132934960284, + "loss": 2.0495, + "step": 3262 + }, + { + "epoch": 2.106520335700452, + "grad_norm": 1.2035230098267398, + "learning_rate": 0.00027834790419733747, + "loss": 2.241, + "step": 3263 + }, + { + "epoch": 2.107165913492576, + "grad_norm": 1.163813995266034, + "learning_rate": 0.0002783144552352032, + "loss": 2.1056, + "step": 3264 + }, + { + "epoch": 2.1078114912847, + "grad_norm": 1.1621326590950323, + "learning_rate": 0.0002782809824694053, + "loss": 1.8152, + "step": 3265 + }, + { + "epoch": 2.108457069076824, + "grad_norm": 1.3106973296067368, + "learning_rate": 0.00027824748590615333, + "loss": 2.1006, + "step": 3266 + }, + { + "epoch": 2.1091026468689478, + "grad_norm": 1.1377526635050472, + "learning_rate": 0.0002782139655516612, + "loss": 2.0689, + "step": 3267 + }, + { + "epoch": 2.1097482246610717, + "grad_norm": 1.187779594534363, + "learning_rate": 0.0002781804214121473, + "loss": 1.8712, + "step": 3268 + }, + { + "epoch": 2.1103938024531956, + "grad_norm": 1.2075666490901533, + "learning_rate": 0.00027814685349383437, + "loss": 2.2692, + "step": 3269 + }, + { + "epoch": 2.1110393802453196, + "grad_norm": 1.1493736236060754, + "learning_rate": 0.0002781132618029496, + "loss": 1.8675, + "step": 3270 + }, + { + "epoch": 2.1116849580374435, + "grad_norm": 1.253100024025861, + "learning_rate": 0.00027807964634572475, + "loss": 
2.3047, + "step": 3271 + }, + { + "epoch": 2.1123305358295674, + "grad_norm": 1.2091283469085643, + "learning_rate": 0.00027804600712839564, + "loss": 2.2112, + "step": 3272 + }, + { + "epoch": 2.1129761136216914, + "grad_norm": 1.1779456000674942, + "learning_rate": 0.00027801234415720274, + "loss": 1.937, + "step": 3273 + }, + { + "epoch": 2.1136216914138153, + "grad_norm": 1.1945608507361536, + "learning_rate": 0.000277978657438391, + "loss": 1.6812, + "step": 3274 + }, + { + "epoch": 2.114267269205939, + "grad_norm": 1.1891582208984717, + "learning_rate": 0.0002779449469782096, + "loss": 2.1679, + "step": 3275 + }, + { + "epoch": 2.114912846998063, + "grad_norm": 1.2490444216371837, + "learning_rate": 0.00027791121278291205, + "loss": 1.9102, + "step": 3276 + }, + { + "epoch": 2.115558424790187, + "grad_norm": 1.2351109061584435, + "learning_rate": 0.0002778774548587566, + "loss": 2.3187, + "step": 3277 + }, + { + "epoch": 2.116204002582311, + "grad_norm": 1.346371581493553, + "learning_rate": 0.0002778436732120055, + "loss": 2.0289, + "step": 3278 + }, + { + "epoch": 2.116849580374435, + "grad_norm": 1.1134695619457593, + "learning_rate": 0.0002778098678489258, + "loss": 1.7235, + "step": 3279 + }, + { + "epoch": 2.117495158166559, + "grad_norm": 1.2526767366105547, + "learning_rate": 0.0002777760387757886, + "loss": 2.1027, + "step": 3280 + }, + { + "epoch": 2.118140735958683, + "grad_norm": 1.4182714261169769, + "learning_rate": 0.00027774218599886956, + "loss": 2.0035, + "step": 3281 + }, + { + "epoch": 2.118786313750807, + "grad_norm": 1.3131222525919581, + "learning_rate": 0.0002777083095244487, + "loss": 1.8509, + "step": 3282 + }, + { + "epoch": 2.119431891542931, + "grad_norm": 1.165836803686914, + "learning_rate": 0.0002776744093588106, + "loss": 2.1085, + "step": 3283 + }, + { + "epoch": 2.120077469335055, + "grad_norm": 1.2020200126789888, + "learning_rate": 0.0002776404855082439, + "loss": 2.0115, + "step": 3284 + }, + { + "epoch": 2.120723047127179, + "grad_norm": 1.04224937288246, + "learning_rate": 0.00027760653797904197, + "loss": 1.5781, + "step": 3285 + }, + { + "epoch": 2.121368624919303, + "grad_norm": 1.1250563688527087, + "learning_rate": 0.0002775725667775024, + "loss": 2.1948, + "step": 3286 + }, + { + "epoch": 2.122014202711427, + "grad_norm": 1.2371388889389006, + "learning_rate": 0.0002775385719099271, + "loss": 1.9081, + "step": 3287 + }, + { + "epoch": 2.1226597805035508, + "grad_norm": 1.1666087754774255, + "learning_rate": 0.0002775045533826226, + "loss": 2.1637, + "step": 3288 + }, + { + "epoch": 2.1233053582956747, + "grad_norm": 1.1614602316861153, + "learning_rate": 0.00027747051120189964, + "loss": 1.6289, + "step": 3289 + }, + { + "epoch": 2.1239509360877986, + "grad_norm": 1.1906494951038957, + "learning_rate": 0.00027743644537407344, + "loss": 2.1626, + "step": 3290 + }, + { + "epoch": 2.1245965138799225, + "grad_norm": 1.2570418811376158, + "learning_rate": 0.00027740235590546346, + "loss": 1.9746, + "step": 3291 + }, + { + "epoch": 2.1252420916720465, + "grad_norm": 1.2745212100568564, + "learning_rate": 0.00027736824280239376, + "loss": 1.9071, + "step": 3292 + }, + { + "epoch": 2.1258876694641704, + "grad_norm": 1.1913386840676918, + "learning_rate": 0.0002773341060711926, + "loss": 2.0707, + "step": 3293 + }, + { + "epoch": 2.1265332472562943, + "grad_norm": 1.17204354947964, + "learning_rate": 0.00027729994571819267, + "loss": 2.042, + "step": 3294 + }, + { + "epoch": 2.1271788250484183, + "grad_norm": 1.1811577924537822, + "learning_rate": 
0.00027726576174973123, + "loss": 2.0687, + "step": 3295 + }, + { + "epoch": 2.127824402840542, + "grad_norm": 1.3034678612840402, + "learning_rate": 0.0002772315541721496, + "loss": 2.2353, + "step": 3296 + }, + { + "epoch": 2.128469980632666, + "grad_norm": 1.3010874727946318, + "learning_rate": 0.0002771973229917937, + "loss": 2.0547, + "step": 3297 + }, + { + "epoch": 2.12911555842479, + "grad_norm": 1.1089356654881213, + "learning_rate": 0.0002771630682150138, + "loss": 1.635, + "step": 3298 + }, + { + "epoch": 2.129761136216914, + "grad_norm": 1.2733066440014653, + "learning_rate": 0.0002771287898481645, + "loss": 2.0838, + "step": 3299 + }, + { + "epoch": 2.130406714009038, + "grad_norm": 1.2004909396323518, + "learning_rate": 0.0002770944878976047, + "loss": 2.0997, + "step": 3300 + }, + { + "epoch": 2.131052291801162, + "grad_norm": 1.227064590295695, + "learning_rate": 0.0002770601623696979, + "loss": 2.3401, + "step": 3301 + }, + { + "epoch": 2.131697869593286, + "grad_norm": 1.1860753488354954, + "learning_rate": 0.0002770258132708118, + "loss": 2.2464, + "step": 3302 + }, + { + "epoch": 2.13234344738541, + "grad_norm": 1.1659635085860958, + "learning_rate": 0.00027699144060731846, + "loss": 1.9007, + "step": 3303 + }, + { + "epoch": 2.132989025177534, + "grad_norm": 1.1901951186679502, + "learning_rate": 0.0002769570443855944, + "loss": 2.0133, + "step": 3304 + }, + { + "epoch": 2.133634602969658, + "grad_norm": 1.2014491053497385, + "learning_rate": 0.00027692262461202056, + "loss": 1.862, + "step": 3305 + }, + { + "epoch": 2.134280180761782, + "grad_norm": 1.21368908491187, + "learning_rate": 0.000276888181292982, + "loss": 2.1383, + "step": 3306 + }, + { + "epoch": 2.134925758553906, + "grad_norm": 1.0370247431878175, + "learning_rate": 0.0002768537144348685, + "loss": 1.9905, + "step": 3307 + }, + { + "epoch": 2.13557133634603, + "grad_norm": 1.0783708187978893, + "learning_rate": 0.0002768192240440738, + "loss": 1.8939, + "step": 3308 + }, + { + "epoch": 2.1362169141381537, + "grad_norm": 1.2877003639973796, + "learning_rate": 0.00027678471012699646, + "loss": 2.1419, + "step": 3309 + }, + { + "epoch": 2.1368624919302777, + "grad_norm": 1.1629373532059746, + "learning_rate": 0.000276750172690039, + "loss": 2.018, + "step": 3310 + }, + { + "epoch": 2.1375080697224016, + "grad_norm": 1.2048305878634518, + "learning_rate": 0.00027671561173960854, + "loss": 2.082, + "step": 3311 + }, + { + "epoch": 2.1381536475145255, + "grad_norm": 1.3113353173688898, + "learning_rate": 0.0002766810272821165, + "loss": 2.1642, + "step": 3312 + }, + { + "epoch": 2.1387992253066495, + "grad_norm": 1.2657241686954324, + "learning_rate": 0.0002766464193239786, + "loss": 2.2114, + "step": 3313 + }, + { + "epoch": 2.1394448030987734, + "grad_norm": 1.07555749932853, + "learning_rate": 0.0002766117878716151, + "loss": 1.8072, + "step": 3314 + }, + { + "epoch": 2.1400903808908973, + "grad_norm": 1.2154154987823695, + "learning_rate": 0.0002765771329314503, + "loss": 2.2076, + "step": 3315 + }, + { + "epoch": 2.1407359586830212, + "grad_norm": 1.1817229732583217, + "learning_rate": 0.0002765424545099132, + "loss": 2.1198, + "step": 3316 + }, + { + "epoch": 2.141381536475145, + "grad_norm": 1.137539743577516, + "learning_rate": 0.00027650775261343707, + "loss": 1.7571, + "step": 3317 + }, + { + "epoch": 2.142027114267269, + "grad_norm": 1.3220405203458117, + "learning_rate": 0.0002764730272484593, + "loss": 2.1053, + "step": 3318 + }, + { + "epoch": 2.142672692059393, + "grad_norm": 
1.2894677658369353, + "learning_rate": 0.0002764382784214219, + "loss": 2.0824, + "step": 3319 + }, + { + "epoch": 2.143318269851517, + "grad_norm": 1.0873756949877127, + "learning_rate": 0.0002764035061387711, + "loss": 1.9701, + "step": 3320 + }, + { + "epoch": 2.143963847643641, + "grad_norm": 1.2846672039011213, + "learning_rate": 0.00027636871040695747, + "loss": 2.165, + "step": 3321 + }, + { + "epoch": 2.144609425435765, + "grad_norm": 1.2029045049765492, + "learning_rate": 0.00027633389123243613, + "loss": 2.0262, + "step": 3322 + }, + { + "epoch": 2.1452550032278888, + "grad_norm": 1.2659049573010892, + "learning_rate": 0.0002762990486216663, + "loss": 2.232, + "step": 3323 + }, + { + "epoch": 2.1459005810200127, + "grad_norm": 1.181801115853145, + "learning_rate": 0.0002762641825811116, + "loss": 1.9535, + "step": 3324 + }, + { + "epoch": 2.146546158812137, + "grad_norm": 1.2431466894617267, + "learning_rate": 0.00027622929311724013, + "loss": 1.9681, + "step": 3325 + }, + { + "epoch": 2.147191736604261, + "grad_norm": 1.0969458050293064, + "learning_rate": 0.0002761943802365243, + "loss": 2.0733, + "step": 3326 + }, + { + "epoch": 2.147837314396385, + "grad_norm": 1.174398198419129, + "learning_rate": 0.0002761594439454406, + "loss": 1.998, + "step": 3327 + }, + { + "epoch": 2.148482892188509, + "grad_norm": 1.1412169822797544, + "learning_rate": 0.00027612448425047033, + "loss": 1.8741, + "step": 3328 + }, + { + "epoch": 2.149128469980633, + "grad_norm": 1.1397347149934496, + "learning_rate": 0.00027608950115809867, + "loss": 2.0396, + "step": 3329 + }, + { + "epoch": 2.1497740477727567, + "grad_norm": 1.25279761950128, + "learning_rate": 0.0002760544946748155, + "loss": 2.2185, + "step": 3330 + }, + { + "epoch": 2.1504196255648806, + "grad_norm": 1.1357216724040107, + "learning_rate": 0.00027601946480711474, + "loss": 1.9737, + "step": 3331 + }, + { + "epoch": 2.1510652033570046, + "grad_norm": 1.181453503917651, + "learning_rate": 0.00027598441156149496, + "loss": 2.104, + "step": 3332 + }, + { + "epoch": 2.1517107811491285, + "grad_norm": 1.1797276239840921, + "learning_rate": 0.0002759493349444587, + "loss": 1.8919, + "step": 3333 + }, + { + "epoch": 2.1523563589412524, + "grad_norm": 1.3450165447004092, + "learning_rate": 0.00027591423496251325, + "loss": 2.3296, + "step": 3334 + }, + { + "epoch": 2.1530019367333764, + "grad_norm": 1.1609802617898644, + "learning_rate": 0.0002758791116221699, + "loss": 2.1533, + "step": 3335 + }, + { + "epoch": 2.1536475145255003, + "grad_norm": 1.246784250164722, + "learning_rate": 0.00027584396492994433, + "loss": 2.1074, + "step": 3336 + }, + { + "epoch": 2.1542930923176242, + "grad_norm": 1.2657981309563682, + "learning_rate": 0.00027580879489235674, + "loss": 1.7181, + "step": 3337 + }, + { + "epoch": 2.154938670109748, + "grad_norm": 1.1766231733339854, + "learning_rate": 0.0002757736015159315, + "loss": 2.1973, + "step": 3338 + }, + { + "epoch": 2.155584247901872, + "grad_norm": 1.1091815723839555, + "learning_rate": 0.0002757383848071974, + "loss": 2.0751, + "step": 3339 + }, + { + "epoch": 2.156229825693996, + "grad_norm": 1.2366409439333887, + "learning_rate": 0.0002757031447726874, + "loss": 2.3717, + "step": 3340 + }, + { + "epoch": 2.15687540348612, + "grad_norm": 1.1380295806361556, + "learning_rate": 0.0002756678814189389, + "loss": 1.9542, + "step": 3341 + }, + { + "epoch": 2.157520981278244, + "grad_norm": 1.2332043234526655, + "learning_rate": 0.0002756325947524938, + "loss": 2.0518, + "step": 3342 + }, + { + "epoch": 
2.158166559070368, + "grad_norm": 1.3070220772129733, + "learning_rate": 0.00027559728477989784, + "loss": 2.0909, + "step": 3343 + }, + { + "epoch": 2.1588121368624917, + "grad_norm": 1.2529271647668518, + "learning_rate": 0.00027556195150770164, + "loss": 2.0586, + "step": 3344 + }, + { + "epoch": 2.159457714654616, + "grad_norm": 1.2666015605267835, + "learning_rate": 0.0002755265949424598, + "loss": 1.6612, + "step": 3345 + }, + { + "epoch": 2.16010329244674, + "grad_norm": 1.1217284887551524, + "learning_rate": 0.0002754912150907313, + "loss": 1.8009, + "step": 3346 + }, + { + "epoch": 2.160748870238864, + "grad_norm": 1.223335613289106, + "learning_rate": 0.0002754558119590795, + "loss": 2.3492, + "step": 3347 + }, + { + "epoch": 2.161394448030988, + "grad_norm": 1.207365933979319, + "learning_rate": 0.00027542038555407203, + "loss": 2.2649, + "step": 3348 + }, + { + "epoch": 2.162040025823112, + "grad_norm": 1.160758335388476, + "learning_rate": 0.00027538493588228084, + "loss": 2.1169, + "step": 3349 + }, + { + "epoch": 2.1626856036152358, + "grad_norm": 1.1714212021908725, + "learning_rate": 0.0002753494629502823, + "loss": 1.9811, + "step": 3350 + }, + { + "epoch": 2.1633311814073597, + "grad_norm": 1.1859627917045645, + "learning_rate": 0.00027531396676465695, + "loss": 2.0907, + "step": 3351 + }, + { + "epoch": 2.1639767591994836, + "grad_norm": 1.1966017077768236, + "learning_rate": 0.00027527844733198965, + "loss": 2.1851, + "step": 3352 + }, + { + "epoch": 2.1646223369916076, + "grad_norm": 1.1386285500776543, + "learning_rate": 0.0002752429046588697, + "loss": 1.9314, + "step": 3353 + }, + { + "epoch": 2.1652679147837315, + "grad_norm": 1.174599608783998, + "learning_rate": 0.0002752073387518907, + "loss": 2.2187, + "step": 3354 + }, + { + "epoch": 2.1659134925758554, + "grad_norm": 1.2333208693357691, + "learning_rate": 0.0002751717496176503, + "loss": 2.0796, + "step": 3355 + }, + { + "epoch": 2.1665590703679793, + "grad_norm": 1.2021292225907556, + "learning_rate": 0.0002751361372627508, + "loss": 2.13, + "step": 3356 + }, + { + "epoch": 2.1672046481601033, + "grad_norm": 1.1972260329177855, + "learning_rate": 0.0002751005016937986, + "loss": 1.8579, + "step": 3357 + }, + { + "epoch": 2.167850225952227, + "grad_norm": 1.3694215324872971, + "learning_rate": 0.0002750648429174045, + "loss": 2.0459, + "step": 3358 + }, + { + "epoch": 2.168495803744351, + "grad_norm": 1.1588353411765824, + "learning_rate": 0.0002750291609401836, + "loss": 2.0577, + "step": 3359 + }, + { + "epoch": 2.169141381536475, + "grad_norm": 1.2917367457877489, + "learning_rate": 0.00027499345576875516, + "loss": 2.1203, + "step": 3360 + }, + { + "epoch": 2.169786959328599, + "grad_norm": 1.1686169034701177, + "learning_rate": 0.000274957727409743, + "loss": 1.7795, + "step": 3361 + }, + { + "epoch": 2.170432537120723, + "grad_norm": 1.206569395681077, + "learning_rate": 0.000274921975869775, + "loss": 1.8249, + "step": 3362 + }, + { + "epoch": 2.171078114912847, + "grad_norm": 1.3598368572371462, + "learning_rate": 0.0002748862011554835, + "loss": 2.2835, + "step": 3363 + }, + { + "epoch": 2.171723692704971, + "grad_norm": 1.2815349967534184, + "learning_rate": 0.00027485040327350507, + "loss": 2.0841, + "step": 3364 + }, + { + "epoch": 2.1723692704970947, + "grad_norm": 1.2236129066727113, + "learning_rate": 0.0002748145822304806, + "loss": 2.0979, + "step": 3365 + }, + { + "epoch": 2.1730148482892186, + "grad_norm": 1.4223069091381964, + "learning_rate": 0.00027477873803305523, + "loss": 1.9978, + 
"step": 3366 + }, + { + "epoch": 2.1736604260813426, + "grad_norm": 1.1466927204213562, + "learning_rate": 0.0002747428706878784, + "loss": 2.0996, + "step": 3367 + }, + { + "epoch": 2.174306003873467, + "grad_norm": 1.1876273979310097, + "learning_rate": 0.000274706980201604, + "loss": 2.1139, + "step": 3368 + }, + { + "epoch": 2.174951581665591, + "grad_norm": 1.2774452550819535, + "learning_rate": 0.00027467106658089, + "loss": 2.1808, + "step": 3369 + }, + { + "epoch": 2.175597159457715, + "grad_norm": 1.245521238273632, + "learning_rate": 0.0002746351298323987, + "loss": 2.3767, + "step": 3370 + }, + { + "epoch": 2.1762427372498387, + "grad_norm": 1.3369737224191698, + "learning_rate": 0.00027459916996279687, + "loss": 2.0685, + "step": 3371 + }, + { + "epoch": 2.1768883150419627, + "grad_norm": 1.1073988266159256, + "learning_rate": 0.00027456318697875536, + "loss": 1.9535, + "step": 3372 + }, + { + "epoch": 2.1775338928340866, + "grad_norm": 1.1824104271848277, + "learning_rate": 0.00027452718088694935, + "loss": 1.9604, + "step": 3373 + }, + { + "epoch": 2.1781794706262105, + "grad_norm": 1.1924009868783494, + "learning_rate": 0.00027449115169405844, + "loss": 1.9847, + "step": 3374 + }, + { + "epoch": 2.1788250484183345, + "grad_norm": 1.1401736574393455, + "learning_rate": 0.00027445509940676635, + "loss": 2.0409, + "step": 3375 + }, + { + "epoch": 2.1794706262104584, + "grad_norm": 1.2002349959159473, + "learning_rate": 0.00027441902403176115, + "loss": 2.03, + "step": 3376 + }, + { + "epoch": 2.1801162040025823, + "grad_norm": 1.1280210596916693, + "learning_rate": 0.0002743829255757353, + "loss": 2.0492, + "step": 3377 + }, + { + "epoch": 2.1807617817947063, + "grad_norm": 1.2224223936825753, + "learning_rate": 0.00027434680404538525, + "loss": 2.1377, + "step": 3378 + }, + { + "epoch": 2.18140735958683, + "grad_norm": 1.350008493529457, + "learning_rate": 0.0002743106594474121, + "loss": 2.1869, + "step": 3379 + }, + { + "epoch": 2.182052937378954, + "grad_norm": 1.1741303349322598, + "learning_rate": 0.00027427449178852095, + "loss": 1.9513, + "step": 3380 + }, + { + "epoch": 2.182698515171078, + "grad_norm": 1.2614765468900344, + "learning_rate": 0.0002742383010754213, + "loss": 2.1582, + "step": 3381 + }, + { + "epoch": 2.183344092963202, + "grad_norm": 1.1901440205636453, + "learning_rate": 0.00027420208731482695, + "loss": 1.9894, + "step": 3382 + }, + { + "epoch": 2.183989670755326, + "grad_norm": 1.1588892792375904, + "learning_rate": 0.0002741658505134558, + "loss": 2.1241, + "step": 3383 + }, + { + "epoch": 2.18463524854745, + "grad_norm": 1.03430084414211, + "learning_rate": 0.0002741295906780303, + "loss": 1.8971, + "step": 3384 + }, + { + "epoch": 2.1852808263395738, + "grad_norm": 1.2389337404556786, + "learning_rate": 0.00027409330781527695, + "loss": 2.1207, + "step": 3385 + }, + { + "epoch": 2.1859264041316977, + "grad_norm": 1.2041367630532491, + "learning_rate": 0.0002740570019319266, + "loss": 2.1138, + "step": 3386 + }, + { + "epoch": 2.1865719819238216, + "grad_norm": 1.144802189497276, + "learning_rate": 0.0002740206730347144, + "loss": 1.9342, + "step": 3387 + }, + { + "epoch": 2.187217559715946, + "grad_norm": 1.1468997574894448, + "learning_rate": 0.0002739843211303797, + "loss": 2.0508, + "step": 3388 + }, + { + "epoch": 2.18786313750807, + "grad_norm": 1.1206244927334308, + "learning_rate": 0.0002739479462256662, + "loss": 2.0714, + "step": 3389 + }, + { + "epoch": 2.188508715300194, + "grad_norm": 1.0853665676275923, + "learning_rate": 
0.00027391154832732175, + "loss": 2.0244, + "step": 3390 + }, + { + "epoch": 2.189154293092318, + "grad_norm": 1.321107020774685, + "learning_rate": 0.00027387512744209857, + "loss": 1.9927, + "step": 3391 + }, + { + "epoch": 2.1897998708844417, + "grad_norm": 1.1287921953513844, + "learning_rate": 0.0002738386835767532, + "loss": 2.0779, + "step": 3392 + }, + { + "epoch": 2.1904454486765657, + "grad_norm": 1.2068999078144622, + "learning_rate": 0.0002738022167380462, + "loss": 2.0191, + "step": 3393 + }, + { + "epoch": 2.1910910264686896, + "grad_norm": 1.215558417230867, + "learning_rate": 0.00027376572693274266, + "loss": 1.8919, + "step": 3394 + }, + { + "epoch": 2.1917366042608135, + "grad_norm": 1.1852585510691231, + "learning_rate": 0.00027372921416761177, + "loss": 2.157, + "step": 3395 + }, + { + "epoch": 2.1923821820529374, + "grad_norm": 1.269889510745539, + "learning_rate": 0.0002736926784494271, + "loss": 2.1575, + "step": 3396 + }, + { + "epoch": 2.1930277598450614, + "grad_norm": 1.187721358883, + "learning_rate": 0.0002736561197849663, + "loss": 2.0596, + "step": 3397 + }, + { + "epoch": 2.1936733376371853, + "grad_norm": 1.1227703445691772, + "learning_rate": 0.00027361953818101143, + "loss": 1.8803, + "step": 3398 + }, + { + "epoch": 2.1943189154293092, + "grad_norm": 1.233510714602027, + "learning_rate": 0.00027358293364434876, + "loss": 2.0158, + "step": 3399 + }, + { + "epoch": 2.194964493221433, + "grad_norm": 1.2512879963069223, + "learning_rate": 0.0002735463061817688, + "loss": 1.9934, + "step": 3400 + }, + { + "epoch": 2.195610071013557, + "grad_norm": 1.2587412376678113, + "learning_rate": 0.00027350965580006634, + "loss": 2.154, + "step": 3401 + }, + { + "epoch": 2.196255648805681, + "grad_norm": 1.3243863781328533, + "learning_rate": 0.0002734729825060404, + "loss": 2.254, + "step": 3402 + }, + { + "epoch": 2.196901226597805, + "grad_norm": 1.1790254188468543, + "learning_rate": 0.00027343628630649425, + "loss": 1.7906, + "step": 3403 + }, + { + "epoch": 2.197546804389929, + "grad_norm": 1.2176905294137454, + "learning_rate": 0.0002733995672082355, + "loss": 2.1569, + "step": 3404 + }, + { + "epoch": 2.198192382182053, + "grad_norm": 1.1574934493993323, + "learning_rate": 0.0002733628252180757, + "loss": 1.77, + "step": 3405 + }, + { + "epoch": 2.1988379599741767, + "grad_norm": 1.2703933118206874, + "learning_rate": 0.0002733260603428311, + "loss": 1.9938, + "step": 3406 + }, + { + "epoch": 2.1994835377663007, + "grad_norm": 1.1486892617143325, + "learning_rate": 0.0002732892725893219, + "loss": 2.1032, + "step": 3407 + }, + { + "epoch": 2.2001291155584246, + "grad_norm": 1.289008912271642, + "learning_rate": 0.00027325246196437255, + "loss": 2.0493, + "step": 3408 + }, + { + "epoch": 2.2007746933505485, + "grad_norm": 1.2954204743676057, + "learning_rate": 0.00027321562847481184, + "loss": 2.1121, + "step": 3409 + }, + { + "epoch": 2.2014202711426725, + "grad_norm": 1.3109935380860398, + "learning_rate": 0.0002731787721274728, + "loss": 2.1952, + "step": 3410 + }, + { + "epoch": 2.202065848934797, + "grad_norm": 1.210689181273627, + "learning_rate": 0.0002731418929291927, + "loss": 2.1641, + "step": 3411 + }, + { + "epoch": 2.2027114267269208, + "grad_norm": 1.2560658295747904, + "learning_rate": 0.00027310499088681285, + "loss": 2.2323, + "step": 3412 + }, + { + "epoch": 2.2033570045190447, + "grad_norm": 1.2923380758796046, + "learning_rate": 0.00027306806600717906, + "loss": 2.1766, + "step": 3413 + }, + { + "epoch": 2.2040025823111686, + "grad_norm": 
1.0764055843679998, + "learning_rate": 0.0002730311182971413, + "loss": 1.6262, + "step": 3414 + }, + { + "epoch": 2.2046481601032926, + "grad_norm": 1.0991404037502301, + "learning_rate": 0.00027299414776355376, + "loss": 2.0409, + "step": 3415 + }, + { + "epoch": 2.2052937378954165, + "grad_norm": 1.120036548073982, + "learning_rate": 0.0002729571544132748, + "loss": 2.1553, + "step": 3416 + }, + { + "epoch": 2.2059393156875404, + "grad_norm": 1.071929845092537, + "learning_rate": 0.0002729201382531671, + "loss": 2.0202, + "step": 3417 + }, + { + "epoch": 2.2065848934796644, + "grad_norm": 1.2751023205162881, + "learning_rate": 0.0002728830992900976, + "loss": 2.2034, + "step": 3418 + }, + { + "epoch": 2.2072304712717883, + "grad_norm": 1.1428476272868746, + "learning_rate": 0.0002728460375309373, + "loss": 1.6677, + "step": 3419 + }, + { + "epoch": 2.207876049063912, + "grad_norm": 1.1803892179572801, + "learning_rate": 0.0002728089529825616, + "loss": 1.9868, + "step": 3420 + }, + { + "epoch": 2.208521626856036, + "grad_norm": 1.1517151290942593, + "learning_rate": 0.0002727718456518501, + "loss": 2.119, + "step": 3421 + }, + { + "epoch": 2.20916720464816, + "grad_norm": 1.1832826351663872, + "learning_rate": 0.00027273471554568657, + "loss": 1.9526, + "step": 3422 + }, + { + "epoch": 2.209812782440284, + "grad_norm": 1.1880916116710174, + "learning_rate": 0.000272697562670959, + "loss": 2.2912, + "step": 3423 + }, + { + "epoch": 2.210458360232408, + "grad_norm": 1.1711888090759255, + "learning_rate": 0.0002726603870345596, + "loss": 2.058, + "step": 3424 + }, + { + "epoch": 2.211103938024532, + "grad_norm": 1.2031441878779232, + "learning_rate": 0.000272623188643385, + "loss": 2.0032, + "step": 3425 + }, + { + "epoch": 2.211749515816656, + "grad_norm": 1.2618997432704415, + "learning_rate": 0.0002725859675043357, + "loss": 1.8378, + "step": 3426 + }, + { + "epoch": 2.2123950936087797, + "grad_norm": 1.2094377316910827, + "learning_rate": 0.0002725487236243167, + "loss": 2.3147, + "step": 3427 + }, + { + "epoch": 2.2130406714009037, + "grad_norm": 1.1897844548599563, + "learning_rate": 0.00027251145701023713, + "loss": 1.9336, + "step": 3428 + }, + { + "epoch": 2.2136862491930276, + "grad_norm": 1.1880345302331963, + "learning_rate": 0.00027247416766901033, + "loss": 2.0267, + "step": 3429 + }, + { + "epoch": 2.2143318269851515, + "grad_norm": 1.1868203102696264, + "learning_rate": 0.0002724368556075538, + "loss": 2.0083, + "step": 3430 + }, + { + "epoch": 2.214977404777276, + "grad_norm": 1.2621725800718704, + "learning_rate": 0.00027239952083278943, + "loss": 2.0239, + "step": 3431 + }, + { + "epoch": 2.2156229825694, + "grad_norm": 1.1458482519846622, + "learning_rate": 0.0002723621633516431, + "loss": 2.0092, + "step": 3432 + }, + { + "epoch": 2.2162685603615238, + "grad_norm": 1.13124158737683, + "learning_rate": 0.00027232478317104505, + "loss": 2.0311, + "step": 3433 + }, + { + "epoch": 2.2169141381536477, + "grad_norm": 1.464359387090233, + "learning_rate": 0.00027228738029792967, + "loss": 2.3073, + "step": 3434 + }, + { + "epoch": 2.2175597159457716, + "grad_norm": 1.2495169034731037, + "learning_rate": 0.00027224995473923566, + "loss": 1.9449, + "step": 3435 + }, + { + "epoch": 2.2182052937378955, + "grad_norm": 1.1327950535652263, + "learning_rate": 0.00027221250650190577, + "loss": 1.9796, + "step": 3436 + }, + { + "epoch": 2.2188508715300195, + "grad_norm": 1.1846773416028809, + "learning_rate": 0.000272175035592887, + "loss": 2.1717, + "step": 3437 + }, + { + "epoch": 
2.2194964493221434, + "grad_norm": 1.2370956037616898, + "learning_rate": 0.0002721375420191308, + "loss": 1.8451, + "step": 3438 + }, + { + "epoch": 2.2201420271142673, + "grad_norm": 1.178500610027287, + "learning_rate": 0.0002721000257875924, + "loss": 1.9521, + "step": 3439 + }, + { + "epoch": 2.2207876049063913, + "grad_norm": 1.2262794378445216, + "learning_rate": 0.00027206248690523146, + "loss": 2.2155, + "step": 3440 + }, + { + "epoch": 2.221433182698515, + "grad_norm": 1.431629136892542, + "learning_rate": 0.00027202492537901197, + "loss": 2.2223, + "step": 3441 + }, + { + "epoch": 2.222078760490639, + "grad_norm": 1.2673637046546442, + "learning_rate": 0.0002719873412159019, + "loss": 2.0946, + "step": 3442 + }, + { + "epoch": 2.222724338282763, + "grad_norm": 1.2414981080537488, + "learning_rate": 0.00027194973442287344, + "loss": 2.2518, + "step": 3443 + }, + { + "epoch": 2.223369916074887, + "grad_norm": 1.296544324901308, + "learning_rate": 0.0002719121050069032, + "loss": 2.148, + "step": 3444 + }, + { + "epoch": 2.224015493867011, + "grad_norm": 1.3359744631418313, + "learning_rate": 0.0002718744529749717, + "loss": 2.2297, + "step": 3445 + }, + { + "epoch": 2.224661071659135, + "grad_norm": 1.1867381583268763, + "learning_rate": 0.0002718367783340638, + "loss": 2.0851, + "step": 3446 + }, + { + "epoch": 2.225306649451259, + "grad_norm": 1.131406422729333, + "learning_rate": 0.0002717990810911686, + "loss": 1.9625, + "step": 3447 + }, + { + "epoch": 2.2259522272433827, + "grad_norm": 1.0839230743337, + "learning_rate": 0.0002717613612532792, + "loss": 1.8712, + "step": 3448 + }, + { + "epoch": 2.2265978050355066, + "grad_norm": 1.1238424145153363, + "learning_rate": 0.0002717236188273932, + "loss": 1.6105, + "step": 3449 + }, + { + "epoch": 2.2272433828276306, + "grad_norm": 1.2900285652219319, + "learning_rate": 0.0002716858538205121, + "loss": 2.0446, + "step": 3450 + }, + { + "epoch": 2.2278889606197545, + "grad_norm": 1.184436619696055, + "learning_rate": 0.00027164806623964174, + "loss": 2.043, + "step": 3451 + }, + { + "epoch": 2.2285345384118784, + "grad_norm": 1.0506555544594698, + "learning_rate": 0.000271610256091792, + "loss": 1.5794, + "step": 3452 + }, + { + "epoch": 2.2291801162040024, + "grad_norm": 1.335466592393088, + "learning_rate": 0.0002715724233839772, + "loss": 2.1288, + "step": 3453 + }, + { + "epoch": 2.2298256939961267, + "grad_norm": 1.2776919078365006, + "learning_rate": 0.00027153456812321566, + "loss": 2.2085, + "step": 3454 + }, + { + "epoch": 2.2304712717882507, + "grad_norm": 1.2050713262645016, + "learning_rate": 0.00027149669031652994, + "loss": 2.1793, + "step": 3455 + }, + { + "epoch": 2.2311168495803746, + "grad_norm": 1.2360172118689106, + "learning_rate": 0.0002714587899709467, + "loss": 2.0578, + "step": 3456 + }, + { + "epoch": 2.2317624273724985, + "grad_norm": 1.351947500797182, + "learning_rate": 0.00027142086709349686, + "loss": 2.0444, + "step": 3457 + }, + { + "epoch": 2.2324080051646225, + "grad_norm": 1.221430304590685, + "learning_rate": 0.00027138292169121556, + "loss": 1.8912, + "step": 3458 + }, + { + "epoch": 2.2330535829567464, + "grad_norm": 1.1600307824040268, + "learning_rate": 0.000271344953771142, + "loss": 2.2351, + "step": 3459 + }, + { + "epoch": 2.2336991607488703, + "grad_norm": 1.1171207218240846, + "learning_rate": 0.00027130696334031975, + "loss": 2.0862, + "step": 3460 + }, + { + "epoch": 2.2343447385409942, + "grad_norm": 1.1561283041480994, + "learning_rate": 0.00027126895040579624, + "loss": 2.1757, + 
"step": 3461 + }, + { + "epoch": 2.234990316333118, + "grad_norm": 1.1493811267744425, + "learning_rate": 0.00027123091497462343, + "loss": 1.9063, + "step": 3462 + }, + { + "epoch": 2.235635894125242, + "grad_norm": 1.1214131378806655, + "learning_rate": 0.0002711928570538572, + "loss": 2.0496, + "step": 3463 + }, + { + "epoch": 2.236281471917366, + "grad_norm": 1.1705788022405874, + "learning_rate": 0.0002711547766505577, + "loss": 1.9713, + "step": 3464 + }, + { + "epoch": 2.23692704970949, + "grad_norm": 1.164733589614244, + "learning_rate": 0.00027111667377178924, + "loss": 2.2315, + "step": 3465 + }, + { + "epoch": 2.237572627501614, + "grad_norm": 1.3428282288705926, + "learning_rate": 0.0002710785484246203, + "loss": 2.2458, + "step": 3466 + }, + { + "epoch": 2.238218205293738, + "grad_norm": 1.3482858159357145, + "learning_rate": 0.00027104040061612356, + "loss": 2.2807, + "step": 3467 + }, + { + "epoch": 2.2388637830858618, + "grad_norm": 1.1103652700367839, + "learning_rate": 0.0002710022303533758, + "loss": 1.9702, + "step": 3468 + }, + { + "epoch": 2.2395093608779857, + "grad_norm": 1.179903648569323, + "learning_rate": 0.00027096403764345807, + "loss": 2.1632, + "step": 3469 + }, + { + "epoch": 2.2401549386701096, + "grad_norm": 1.2765939102575299, + "learning_rate": 0.0002709258224934554, + "loss": 2.132, + "step": 3470 + }, + { + "epoch": 2.2408005164622335, + "grad_norm": 1.2374525656875461, + "learning_rate": 0.0002708875849104572, + "loss": 2.3103, + "step": 3471 + }, + { + "epoch": 2.2414460942543575, + "grad_norm": 1.1083389521812144, + "learning_rate": 0.00027084932490155687, + "loss": 1.6284, + "step": 3472 + }, + { + "epoch": 2.2420916720464814, + "grad_norm": 1.4028032186551629, + "learning_rate": 0.00027081104247385194, + "loss": 2.1697, + "step": 3473 + }, + { + "epoch": 2.242737249838606, + "grad_norm": 1.1874706339570982, + "learning_rate": 0.0002707727376344445, + "loss": 1.9686, + "step": 3474 + }, + { + "epoch": 2.2433828276307297, + "grad_norm": 1.1961831054360164, + "learning_rate": 0.00027073441039044023, + "loss": 2.1859, + "step": 3475 + }, + { + "epoch": 2.2440284054228536, + "grad_norm": 1.2138602154952778, + "learning_rate": 0.00027069606074894934, + "loss": 2.2632, + "step": 3476 + }, + { + "epoch": 2.2446739832149776, + "grad_norm": 1.177784245603816, + "learning_rate": 0.000270657688717086, + "loss": 1.9189, + "step": 3477 + }, + { + "epoch": 2.2453195610071015, + "grad_norm": 1.2214734697423841, + "learning_rate": 0.00027061929430196875, + "loss": 2.2079, + "step": 3478 + }, + { + "epoch": 2.2459651387992254, + "grad_norm": 1.1294022799064294, + "learning_rate": 0.0002705808775107201, + "loss": 2.1575, + "step": 3479 + }, + { + "epoch": 2.2466107165913494, + "grad_norm": 1.107717358797502, + "learning_rate": 0.0002705424383504667, + "loss": 1.8787, + "step": 3480 + }, + { + "epoch": 2.2472562943834733, + "grad_norm": 1.3181594677554123, + "learning_rate": 0.00027050397682833947, + "loss": 2.2604, + "step": 3481 + }, + { + "epoch": 2.2479018721755972, + "grad_norm": 1.2447953180840172, + "learning_rate": 0.00027046549295147343, + "loss": 2.2377, + "step": 3482 + }, + { + "epoch": 2.248547449967721, + "grad_norm": 1.1357498494248033, + "learning_rate": 0.00027042698672700776, + "loss": 2.27, + "step": 3483 + }, + { + "epoch": 2.249193027759845, + "grad_norm": 1.066650539039362, + "learning_rate": 0.0002703884581620857, + "loss": 2.0178, + "step": 3484 + }, + { + "epoch": 2.249838605551969, + "grad_norm": 1.0719109334925263, + "learning_rate": 
0.00027034990726385467, + "loss": 1.8688, + "step": 3485 + }, + { + "epoch": 2.250484183344093, + "grad_norm": 1.2023940513690112, + "learning_rate": 0.00027031133403946636, + "loss": 1.9188, + "step": 3486 + }, + { + "epoch": 2.251129761136217, + "grad_norm": 1.2000491977305214, + "learning_rate": 0.00027027273849607647, + "loss": 2.3705, + "step": 3487 + }, + { + "epoch": 2.251775338928341, + "grad_norm": 1.409139416429827, + "learning_rate": 0.00027023412064084486, + "loss": 1.8543, + "step": 3488 + }, + { + "epoch": 2.2524209167204647, + "grad_norm": 1.3371517020034434, + "learning_rate": 0.00027019548048093557, + "loss": 2.1561, + "step": 3489 + }, + { + "epoch": 2.2530664945125887, + "grad_norm": 1.2686107468030643, + "learning_rate": 0.0002701568180235167, + "loss": 2.0645, + "step": 3490 + }, + { + "epoch": 2.2537120723047126, + "grad_norm": 1.0954258536873482, + "learning_rate": 0.00027011813327576056, + "loss": 1.7843, + "step": 3491 + }, + { + "epoch": 2.2543576500968365, + "grad_norm": 1.140391725459588, + "learning_rate": 0.0002700794262448436, + "loss": 2.0299, + "step": 3492 + }, + { + "epoch": 2.2550032278889605, + "grad_norm": 1.1766245876789798, + "learning_rate": 0.00027004069693794636, + "loss": 2.0643, + "step": 3493 + }, + { + "epoch": 2.2556488056810844, + "grad_norm": 1.2658713171084084, + "learning_rate": 0.0002700019453622535, + "loss": 2.0993, + "step": 3494 + }, + { + "epoch": 2.2562943834732083, + "grad_norm": 1.3609244704358803, + "learning_rate": 0.00026996317152495387, + "loss": 2.2524, + "step": 3495 + }, + { + "epoch": 2.2569399612653323, + "grad_norm": 1.1952492967728297, + "learning_rate": 0.00026992437543324043, + "loss": 2.0276, + "step": 3496 + }, + { + "epoch": 2.257585539057456, + "grad_norm": 1.2926659108802727, + "learning_rate": 0.0002698855570943102, + "loss": 2.0537, + "step": 3497 + }, + { + "epoch": 2.2582311168495806, + "grad_norm": 1.169160890394963, + "learning_rate": 0.00026984671651536453, + "loss": 2.2095, + "step": 3498 + }, + { + "epoch": 2.2588766946417045, + "grad_norm": 1.2709059820139308, + "learning_rate": 0.00026980785370360857, + "loss": 2.3479, + "step": 3499 + }, + { + "epoch": 2.2595222724338284, + "grad_norm": 1.2174264425841785, + "learning_rate": 0.00026976896866625185, + "loss": 2.1272, + "step": 3500 + }, + { + "epoch": 2.2601678502259523, + "grad_norm": 1.1101196897387666, + "learning_rate": 0.0002697300614105079, + "loss": 2.0411, + "step": 3501 + }, + { + "epoch": 2.2608134280180763, + "grad_norm": 1.2218794406454432, + "learning_rate": 0.0002696911319435946, + "loss": 2.1867, + "step": 3502 + }, + { + "epoch": 2.2614590058102, + "grad_norm": 1.1515952544526662, + "learning_rate": 0.00026965218027273353, + "loss": 1.9613, + "step": 3503 + }, + { + "epoch": 2.262104583602324, + "grad_norm": 1.2907129494488938, + "learning_rate": 0.00026961320640515077, + "loss": 2.1751, + "step": 3504 + }, + { + "epoch": 2.262750161394448, + "grad_norm": 1.1470878157261433, + "learning_rate": 0.0002695742103480763, + "loss": 2.1231, + "step": 3505 + }, + { + "epoch": 2.263395739186572, + "grad_norm": 1.0443092772331004, + "learning_rate": 0.00026953519210874444, + "loss": 1.8589, + "step": 3506 + }, + { + "epoch": 2.264041316978696, + "grad_norm": 1.2157925326252814, + "learning_rate": 0.0002694961516943933, + "loss": 1.7279, + "step": 3507 + }, + { + "epoch": 2.26468689477082, + "grad_norm": 1.083888637009533, + "learning_rate": 0.00026945708911226544, + "loss": 1.8743, + "step": 3508 + }, + { + "epoch": 2.265332472562944, + 
"grad_norm": 1.1720925039319487, + "learning_rate": 0.0002694180043696072, + "loss": 2.066, + "step": 3509 + }, + { + "epoch": 2.2659780503550677, + "grad_norm": 1.2161683462610011, + "learning_rate": 0.0002693788974736694, + "loss": 1.9249, + "step": 3510 + }, + { + "epoch": 2.2666236281471916, + "grad_norm": 1.2056182426336017, + "learning_rate": 0.00026933976843170656, + "loss": 2.0888, + "step": 3511 + }, + { + "epoch": 2.2672692059393156, + "grad_norm": 1.25823838028326, + "learning_rate": 0.0002693006172509777, + "loss": 2.2313, + "step": 3512 + }, + { + "epoch": 2.2679147837314395, + "grad_norm": 1.2726764030156732, + "learning_rate": 0.00026926144393874576, + "loss": 2.241, + "step": 3513 + }, + { + "epoch": 2.2685603615235634, + "grad_norm": 1.0908284682555858, + "learning_rate": 0.0002692222485022777, + "loss": 2.2038, + "step": 3514 + }, + { + "epoch": 2.2692059393156874, + "grad_norm": 1.0321374966296657, + "learning_rate": 0.00026918303094884466, + "loss": 1.891, + "step": 3515 + }, + { + "epoch": 2.2698515171078113, + "grad_norm": 1.1976099588575198, + "learning_rate": 0.00026914379128572207, + "loss": 2.2087, + "step": 3516 + }, + { + "epoch": 2.2704970948999357, + "grad_norm": 1.1145371393431704, + "learning_rate": 0.00026910452952018913, + "loss": 2.0217, + "step": 3517 + }, + { + "epoch": 2.2711426726920596, + "grad_norm": 1.1596357911814736, + "learning_rate": 0.0002690652456595294, + "loss": 2.0587, + "step": 3518 + }, + { + "epoch": 2.2717882504841835, + "grad_norm": 1.1090102802974966, + "learning_rate": 0.0002690259397110304, + "loss": 1.8447, + "step": 3519 + }, + { + "epoch": 2.2724338282763075, + "grad_norm": 1.1928750818347276, + "learning_rate": 0.00026898661168198385, + "loss": 1.9179, + "step": 3520 + }, + { + "epoch": 2.2730794060684314, + "grad_norm": 1.0627171918666463, + "learning_rate": 0.0002689472615796854, + "loss": 1.9584, + "step": 3521 + }, + { + "epoch": 2.2737249838605553, + "grad_norm": 1.1579504329825712, + "learning_rate": 0.000268907889411435, + "loss": 2.0521, + "step": 3522 + }, + { + "epoch": 2.2743705616526793, + "grad_norm": 1.142902725913919, + "learning_rate": 0.0002688684951845365, + "loss": 2.012, + "step": 3523 + }, + { + "epoch": 2.275016139444803, + "grad_norm": 1.1683788228994194, + "learning_rate": 0.0002688290789062981, + "loss": 2.262, + "step": 3524 + }, + { + "epoch": 2.275661717236927, + "grad_norm": 1.321318564548378, + "learning_rate": 0.00026878964058403175, + "loss": 2.3308, + "step": 3525 + }, + { + "epoch": 2.276307295029051, + "grad_norm": 1.1429134761131918, + "learning_rate": 0.0002687501802250538, + "loss": 1.9867, + "step": 3526 + }, + { + "epoch": 2.276952872821175, + "grad_norm": 1.2256961571429368, + "learning_rate": 0.00026871069783668445, + "loss": 2.1186, + "step": 3527 + }, + { + "epoch": 2.277598450613299, + "grad_norm": 1.1058408357473792, + "learning_rate": 0.00026867119342624817, + "loss": 1.8382, + "step": 3528 + }, + { + "epoch": 2.278244028405423, + "grad_norm": 1.190304252749363, + "learning_rate": 0.00026863166700107343, + "loss": 2.1591, + "step": 3529 + }, + { + "epoch": 2.2788896061975468, + "grad_norm": 1.2261577074270233, + "learning_rate": 0.0002685921185684928, + "loss": 2.2828, + "step": 3530 + }, + { + "epoch": 2.2795351839896707, + "grad_norm": 1.119264014744377, + "learning_rate": 0.00026855254813584284, + "loss": 2.0056, + "step": 3531 + }, + { + "epoch": 2.2801807617817946, + "grad_norm": 1.129029859064156, + "learning_rate": 0.0002685129557104644, + "loss": 1.9842, + "step": 3532 + }, 
+ { + "epoch": 2.2808263395739186, + "grad_norm": 1.0920795329327644, + "learning_rate": 0.0002684733412997022, + "loss": 1.7105, + "step": 3533 + }, + { + "epoch": 2.2814719173660425, + "grad_norm": 1.2375118181884603, + "learning_rate": 0.00026843370491090514, + "loss": 2.0744, + "step": 3534 + }, + { + "epoch": 2.2821174951581664, + "grad_norm": 1.054926696088795, + "learning_rate": 0.00026839404655142616, + "loss": 1.6454, + "step": 3535 + }, + { + "epoch": 2.2827630729502904, + "grad_norm": 1.0871992220457067, + "learning_rate": 0.00026835436622862235, + "loss": 1.715, + "step": 3536 + }, + { + "epoch": 2.2834086507424143, + "grad_norm": 1.184573610409458, + "learning_rate": 0.0002683146639498548, + "loss": 2.2777, + "step": 3537 + }, + { + "epoch": 2.284054228534538, + "grad_norm": 1.2160109226846847, + "learning_rate": 0.00026827493972248867, + "loss": 1.863, + "step": 3538 + }, + { + "epoch": 2.284699806326662, + "grad_norm": 1.2355260745460903, + "learning_rate": 0.0002682351935538933, + "loss": 2.226, + "step": 3539 + }, + { + "epoch": 2.285345384118786, + "grad_norm": 1.2783892084974047, + "learning_rate": 0.0002681954254514419, + "loss": 2.3984, + "step": 3540 + }, + { + "epoch": 2.2859909619109104, + "grad_norm": 1.0982695261768287, + "learning_rate": 0.000268155635422512, + "loss": 1.9951, + "step": 3541 + }, + { + "epoch": 2.2866365397030344, + "grad_norm": 1.0949696542501013, + "learning_rate": 0.0002681158234744849, + "loss": 1.9101, + "step": 3542 + }, + { + "epoch": 2.2872821174951583, + "grad_norm": 1.2158452611049402, + "learning_rate": 0.00026807598961474627, + "loss": 2.0765, + "step": 3543 + }, + { + "epoch": 2.2879276952872822, + "grad_norm": 1.2087657994194003, + "learning_rate": 0.0002680361338506856, + "loss": 2.1102, + "step": 3544 + }, + { + "epoch": 2.288573273079406, + "grad_norm": 1.2484586398155986, + "learning_rate": 0.00026799625618969663, + "loss": 2.0078, + "step": 3545 + }, + { + "epoch": 2.28921885087153, + "grad_norm": 1.2245094495260342, + "learning_rate": 0.0002679563566391771, + "loss": 2.0935, + "step": 3546 + }, + { + "epoch": 2.289864428663654, + "grad_norm": 1.2307721364481263, + "learning_rate": 0.0002679164352065286, + "loss": 2.1619, + "step": 3547 + }, + { + "epoch": 2.290510006455778, + "grad_norm": 1.172121614711707, + "learning_rate": 0.0002678764918991572, + "loss": 1.9704, + "step": 3548 + }, + { + "epoch": 2.291155584247902, + "grad_norm": 1.1843433912224186, + "learning_rate": 0.0002678365267244727, + "loss": 2.0568, + "step": 3549 + }, + { + "epoch": 2.291801162040026, + "grad_norm": 1.254636356957208, + "learning_rate": 0.00026779653968988913, + "loss": 2.0899, + "step": 3550 + }, + { + "epoch": 2.2924467398321497, + "grad_norm": 1.2615445243846841, + "learning_rate": 0.00026775653080282435, + "loss": 1.856, + "step": 3551 + }, + { + "epoch": 2.2930923176242737, + "grad_norm": 1.1131764526960521, + "learning_rate": 0.00026771650007070054, + "loss": 1.701, + "step": 3552 + }, + { + "epoch": 2.2937378954163976, + "grad_norm": 1.3960834779873337, + "learning_rate": 0.0002676764475009438, + "loss": 2.08, + "step": 3553 + }, + { + "epoch": 2.2943834732085215, + "grad_norm": 1.185163764977957, + "learning_rate": 0.0002676363731009842, + "loss": 2.1544, + "step": 3554 + }, + { + "epoch": 2.2950290510006455, + "grad_norm": 1.271728138105102, + "learning_rate": 0.0002675962768782561, + "loss": 2.0821, + "step": 3555 + }, + { + "epoch": 2.2956746287927694, + "grad_norm": 1.270050779944581, + "learning_rate": 0.00026755615884019773, + 
"loss": 2.0111, + "step": 3556 + }, + { + "epoch": 2.2963202065848933, + "grad_norm": 1.2258926632117537, + "learning_rate": 0.00026751601899425135, + "loss": 2.0439, + "step": 3557 + }, + { + "epoch": 2.2969657843770173, + "grad_norm": 1.3553316036867278, + "learning_rate": 0.0002674758573478633, + "loss": 2.3376, + "step": 3558 + }, + { + "epoch": 2.297611362169141, + "grad_norm": 1.1939872241081935, + "learning_rate": 0.0002674356739084841, + "loss": 2.0026, + "step": 3559 + }, + { + "epoch": 2.2982569399612656, + "grad_norm": 1.2771074630262778, + "learning_rate": 0.0002673954686835681, + "loss": 2.1137, + "step": 3560 + }, + { + "epoch": 2.2989025177533895, + "grad_norm": 1.190570515157903, + "learning_rate": 0.00026735524168057376, + "loss": 1.8375, + "step": 3561 + }, + { + "epoch": 2.2995480955455134, + "grad_norm": 1.2150244850630414, + "learning_rate": 0.00026731499290696373, + "loss": 2.1073, + "step": 3562 + }, + { + "epoch": 2.3001936733376374, + "grad_norm": 1.214854455526893, + "learning_rate": 0.00026727472237020447, + "loss": 2.2948, + "step": 3563 + }, + { + "epoch": 2.3008392511297613, + "grad_norm": 1.1845784554901226, + "learning_rate": 0.0002672344300777666, + "loss": 2.0928, + "step": 3564 + }, + { + "epoch": 2.301484828921885, + "grad_norm": 1.183964010901787, + "learning_rate": 0.0002671941160371248, + "loss": 1.7841, + "step": 3565 + }, + { + "epoch": 2.302130406714009, + "grad_norm": 1.1289099128570181, + "learning_rate": 0.00026715378025575764, + "loss": 2.0032, + "step": 3566 + }, + { + "epoch": 2.302775984506133, + "grad_norm": 1.2294212613497453, + "learning_rate": 0.000267113422741148, + "loss": 1.8971, + "step": 3567 + }, + { + "epoch": 2.303421562298257, + "grad_norm": 1.2697945545489375, + "learning_rate": 0.00026707304350078245, + "loss": 2.0927, + "step": 3568 + }, + { + "epoch": 2.304067140090381, + "grad_norm": 1.202253561992811, + "learning_rate": 0.00026703264254215186, + "loss": 1.9303, + "step": 3569 + }, + { + "epoch": 2.304712717882505, + "grad_norm": 1.3747083568531158, + "learning_rate": 0.000266992219872751, + "loss": 2.344, + "step": 3570 + }, + { + "epoch": 2.305358295674629, + "grad_norm": 1.1934190445505126, + "learning_rate": 0.00026695177550007873, + "loss": 2.1338, + "step": 3571 + }, + { + "epoch": 2.3060038734667527, + "grad_norm": 1.153990904204758, + "learning_rate": 0.00026691130943163775, + "loss": 1.9586, + "step": 3572 + }, + { + "epoch": 2.3066494512588767, + "grad_norm": 1.1459250274435808, + "learning_rate": 0.0002668708216749351, + "loss": 2.1715, + "step": 3573 + }, + { + "epoch": 2.3072950290510006, + "grad_norm": 1.1372955673104537, + "learning_rate": 0.00026683031223748166, + "loss": 1.9885, + "step": 3574 + }, + { + "epoch": 2.3079406068431245, + "grad_norm": 1.205014371184582, + "learning_rate": 0.0002667897811267923, + "loss": 1.9202, + "step": 3575 + }, + { + "epoch": 2.3085861846352485, + "grad_norm": 1.145366584651856, + "learning_rate": 0.00026674922835038595, + "loss": 2.0522, + "step": 3576 + }, + { + "epoch": 2.3092317624273724, + "grad_norm": 1.270677266909552, + "learning_rate": 0.0002667086539157856, + "loss": 1.9542, + "step": 3577 + }, + { + "epoch": 2.3098773402194963, + "grad_norm": 1.1172674918096313, + "learning_rate": 0.00026666805783051833, + "loss": 2.048, + "step": 3578 + }, + { + "epoch": 2.3105229180116202, + "grad_norm": 1.2207885270872174, + "learning_rate": 0.00026662744010211493, + "loss": 1.8452, + "step": 3579 + }, + { + "epoch": 2.311168495803744, + "grad_norm": 1.153813483039212, + 
"learning_rate": 0.0002665868007381105, + "loss": 1.9955, + "step": 3580 + }, + { + "epoch": 2.311814073595868, + "grad_norm": 1.2035438044964644, + "learning_rate": 0.00026654613974604416, + "loss": 1.9905, + "step": 3581 + }, + { + "epoch": 2.312459651387992, + "grad_norm": 1.3642505609530757, + "learning_rate": 0.00026650545713345885, + "loss": 2.2442, + "step": 3582 + }, + { + "epoch": 2.313105229180116, + "grad_norm": 1.3011542744746136, + "learning_rate": 0.0002664647529079016, + "loss": 2.1041, + "step": 3583 + }, + { + "epoch": 2.3137508069722403, + "grad_norm": 1.2320529848937247, + "learning_rate": 0.00026642402707692346, + "loss": 2.1098, + "step": 3584 + }, + { + "epoch": 2.3143963847643643, + "grad_norm": 1.181511399971563, + "learning_rate": 0.0002663832796480796, + "loss": 1.9373, + "step": 3585 + }, + { + "epoch": 2.315041962556488, + "grad_norm": 1.0743409356717102, + "learning_rate": 0.00026634251062892904, + "loss": 1.9386, + "step": 3586 + }, + { + "epoch": 2.315687540348612, + "grad_norm": 1.152709505131503, + "learning_rate": 0.0002663017200270348, + "loss": 1.9844, + "step": 3587 + }, + { + "epoch": 2.316333118140736, + "grad_norm": 1.1066906221145278, + "learning_rate": 0.000266260907849964, + "loss": 2.115, + "step": 3588 + }, + { + "epoch": 2.31697869593286, + "grad_norm": 1.0923556849895022, + "learning_rate": 0.0002662200741052877, + "loss": 1.8846, + "step": 3589 + }, + { + "epoch": 2.317624273724984, + "grad_norm": 1.2687011082196593, + "learning_rate": 0.0002661792188005811, + "loss": 2.1082, + "step": 3590 + }, + { + "epoch": 2.318269851517108, + "grad_norm": 1.1432726936988071, + "learning_rate": 0.0002661383419434231, + "loss": 1.8805, + "step": 3591 + }, + { + "epoch": 2.318915429309232, + "grad_norm": 1.2095019970817777, + "learning_rate": 0.0002660974435413969, + "loss": 2.2193, + "step": 3592 + }, + { + "epoch": 2.3195610071013557, + "grad_norm": 1.1847272334870684, + "learning_rate": 0.0002660565236020895, + "loss": 2.0672, + "step": 3593 + }, + { + "epoch": 2.3202065848934796, + "grad_norm": 1.146804498716825, + "learning_rate": 0.0002660155821330921, + "loss": 2.159, + "step": 3594 + }, + { + "epoch": 2.3208521626856036, + "grad_norm": 1.118104570520286, + "learning_rate": 0.00026597461914199964, + "loss": 2.0632, + "step": 3595 + }, + { + "epoch": 2.3214977404777275, + "grad_norm": 1.2045581831840844, + "learning_rate": 0.0002659336346364113, + "loss": 2.1327, + "step": 3596 + }, + { + "epoch": 2.3221433182698514, + "grad_norm": 1.1717263369789297, + "learning_rate": 0.00026589262862393, + "loss": 2.0079, + "step": 3597 + }, + { + "epoch": 2.3227888960619754, + "grad_norm": 1.2710574708883364, + "learning_rate": 0.00026585160111216287, + "loss": 1.9838, + "step": 3598 + }, + { + "epoch": 2.3234344738540993, + "grad_norm": 1.1590524310220078, + "learning_rate": 0.0002658105521087209, + "loss": 2.1016, + "step": 3599 + }, + { + "epoch": 2.324080051646223, + "grad_norm": 1.109863374178809, + "learning_rate": 0.0002657694816212191, + "loss": 2.1164, + "step": 3600 + }, + { + "epoch": 2.324725629438347, + "grad_norm": 1.0812015043331518, + "learning_rate": 0.0002657283896572765, + "loss": 2.0906, + "step": 3601 + }, + { + "epoch": 2.325371207230471, + "grad_norm": 1.1699833752870095, + "learning_rate": 0.00026568727622451604, + "loss": 1.7935, + "step": 3602 + }, + { + "epoch": 2.3260167850225955, + "grad_norm": 1.2239899577195146, + "learning_rate": 0.00026564614133056484, + "loss": 2.1932, + "step": 3603 + }, + { + "epoch": 2.3266623628147194, + 
"grad_norm": 1.1747950324372134, + "learning_rate": 0.00026560498498305363, + "loss": 2.0986, + "step": 3604 + }, + { + "epoch": 2.3273079406068433, + "grad_norm": 1.2617967260012046, + "learning_rate": 0.0002655638071896175, + "loss": 2.0988, + "step": 3605 + }, + { + "epoch": 2.3279535183989672, + "grad_norm": 1.1875441557453925, + "learning_rate": 0.00026552260795789526, + "loss": 2.1399, + "step": 3606 + }, + { + "epoch": 2.328599096191091, + "grad_norm": 1.0437093131789807, + "learning_rate": 0.0002654813872955299, + "loss": 1.9305, + "step": 3607 + }, + { + "epoch": 2.329244673983215, + "grad_norm": 1.1564519193735787, + "learning_rate": 0.0002654401452101682, + "loss": 2.0744, + "step": 3608 + }, + { + "epoch": 2.329890251775339, + "grad_norm": 1.1025125386539592, + "learning_rate": 0.00026539888170946094, + "loss": 2.159, + "step": 3609 + }, + { + "epoch": 2.330535829567463, + "grad_norm": 1.3035949881267619, + "learning_rate": 0.0002653575968010631, + "loss": 2.0831, + "step": 3610 + }, + { + "epoch": 2.331181407359587, + "grad_norm": 1.1086477522980278, + "learning_rate": 0.00026531629049263334, + "loss": 1.8998, + "step": 3611 + }, + { + "epoch": 2.331826985151711, + "grad_norm": 1.1865937191305522, + "learning_rate": 0.0002652749627918344, + "loss": 1.9984, + "step": 3612 + }, + { + "epoch": 2.3324725629438348, + "grad_norm": 1.1119148796517548, + "learning_rate": 0.00026523361370633307, + "loss": 1.9235, + "step": 3613 + }, + { + "epoch": 2.3331181407359587, + "grad_norm": 1.193039297379646, + "learning_rate": 0.0002651922432437999, + "loss": 1.9945, + "step": 3614 + }, + { + "epoch": 2.3337637185280826, + "grad_norm": 1.092573106121022, + "learning_rate": 0.00026515085141190975, + "loss": 1.7451, + "step": 3615 + }, + { + "epoch": 2.3344092963202066, + "grad_norm": 1.2650128145257242, + "learning_rate": 0.00026510943821834104, + "loss": 2.0543, + "step": 3616 + }, + { + "epoch": 2.3350548741123305, + "grad_norm": 1.142637042239014, + "learning_rate": 0.00026506800367077644, + "loss": 1.9937, + "step": 3617 + }, + { + "epoch": 2.3357004519044544, + "grad_norm": 1.2312755285796324, + "learning_rate": 0.00026502654777690247, + "loss": 2.1335, + "step": 3618 + }, + { + "epoch": 2.3363460296965783, + "grad_norm": 1.178672402936217, + "learning_rate": 0.00026498507054440964, + "loss": 1.9462, + "step": 3619 + }, + { + "epoch": 2.3369916074887023, + "grad_norm": 1.2321420740110989, + "learning_rate": 0.0002649435719809924, + "loss": 2.1097, + "step": 3620 + }, + { + "epoch": 2.337637185280826, + "grad_norm": 1.121624868834306, + "learning_rate": 0.0002649020520943492, + "loss": 1.8691, + "step": 3621 + }, + { + "epoch": 2.33828276307295, + "grad_norm": 1.1905694801165299, + "learning_rate": 0.00026486051089218235, + "loss": 2.0146, + "step": 3622 + }, + { + "epoch": 2.338928340865074, + "grad_norm": 1.2366709244634506, + "learning_rate": 0.00026481894838219813, + "loss": 2.0889, + "step": 3623 + }, + { + "epoch": 2.339573918657198, + "grad_norm": 1.0953885336927243, + "learning_rate": 0.000264777364572107, + "loss": 1.7782, + "step": 3624 + }, + { + "epoch": 2.340219496449322, + "grad_norm": 1.1016911406683412, + "learning_rate": 0.00026473575946962303, + "loss": 2.0919, + "step": 3625 + }, + { + "epoch": 2.340865074241446, + "grad_norm": 1.1181913430468164, + "learning_rate": 0.0002646941330824644, + "loss": 2.0713, + "step": 3626 + }, + { + "epoch": 2.3415106520335702, + "grad_norm": 1.131359995162374, + "learning_rate": 0.00026465248541835334, + "loss": 1.7257, + "step": 3627 + 
}, + { + "epoch": 2.342156229825694, + "grad_norm": 1.3024750533122458, + "learning_rate": 0.0002646108164850158, + "loss": 2.158, + "step": 3628 + }, + { + "epoch": 2.342801807617818, + "grad_norm": 1.2335603699013122, + "learning_rate": 0.0002645691262901819, + "loss": 1.9741, + "step": 3629 + }, + { + "epoch": 2.343447385409942, + "grad_norm": 1.1802470602866093, + "learning_rate": 0.0002645274148415856, + "loss": 2.0679, + "step": 3630 + }, + { + "epoch": 2.344092963202066, + "grad_norm": 1.2727698173996942, + "learning_rate": 0.0002644856821469647, + "loss": 2.1093, + "step": 3631 + }, + { + "epoch": 2.34473854099419, + "grad_norm": 1.1903492464103556, + "learning_rate": 0.00026444392821406117, + "loss": 2.0698, + "step": 3632 + }, + { + "epoch": 2.345384118786314, + "grad_norm": 1.2714424084607254, + "learning_rate": 0.00026440215305062074, + "loss": 2.1343, + "step": 3633 + }, + { + "epoch": 2.3460296965784377, + "grad_norm": 1.0893313384527321, + "learning_rate": 0.00026436035666439317, + "loss": 1.9329, + "step": 3634 + }, + { + "epoch": 2.3466752743705617, + "grad_norm": 1.2040708364826485, + "learning_rate": 0.0002643185390631321, + "loss": 2.1844, + "step": 3635 + }, + { + "epoch": 2.3473208521626856, + "grad_norm": 1.1636051521079476, + "learning_rate": 0.00026427670025459506, + "loss": 1.9541, + "step": 3636 + }, + { + "epoch": 2.3479664299548095, + "grad_norm": 1.0921338563950493, + "learning_rate": 0.00026423484024654375, + "loss": 2.017, + "step": 3637 + }, + { + "epoch": 2.3486120077469335, + "grad_norm": 1.1981343460389418, + "learning_rate": 0.0002641929590467435, + "loss": 2.0781, + "step": 3638 + }, + { + "epoch": 2.3492575855390574, + "grad_norm": 1.2145712051357873, + "learning_rate": 0.0002641510566629637, + "loss": 2.2375, + "step": 3639 + }, + { + "epoch": 2.3499031633311813, + "grad_norm": 1.2091857624669484, + "learning_rate": 0.00026410913310297775, + "loss": 2.0561, + "step": 3640 + }, + { + "epoch": 2.3505487411233053, + "grad_norm": 1.292323332248101, + "learning_rate": 0.00026406718837456296, + "loss": 2.2834, + "step": 3641 + }, + { + "epoch": 2.351194318915429, + "grad_norm": 1.2260173408432025, + "learning_rate": 0.0002640252224855003, + "loss": 2.1976, + "step": 3642 + }, + { + "epoch": 2.351839896707553, + "grad_norm": 1.2964738048676956, + "learning_rate": 0.0002639832354435751, + "loss": 2.1269, + "step": 3643 + }, + { + "epoch": 2.352485474499677, + "grad_norm": 1.2509943969522848, + "learning_rate": 0.0002639412272565763, + "loss": 2.0324, + "step": 3644 + }, + { + "epoch": 2.353131052291801, + "grad_norm": 1.1960761005297813, + "learning_rate": 0.00026389919793229686, + "loss": 2.0182, + "step": 3645 + }, + { + "epoch": 2.3537766300839253, + "grad_norm": 1.213591317691436, + "learning_rate": 0.0002638571474785336, + "loss": 2.1701, + "step": 3646 + }, + { + "epoch": 2.3544222078760493, + "grad_norm": 1.1733810298989904, + "learning_rate": 0.00026381507590308734, + "loss": 2.2715, + "step": 3647 + }, + { + "epoch": 2.355067785668173, + "grad_norm": 1.1631113824679233, + "learning_rate": 0.0002637729832137629, + "loss": 2.1902, + "step": 3648 + }, + { + "epoch": 2.355713363460297, + "grad_norm": 1.2135021610473797, + "learning_rate": 0.0002637308694183688, + "loss": 2.0848, + "step": 3649 + }, + { + "epoch": 2.356358941252421, + "grad_norm": 1.1358846566105638, + "learning_rate": 0.0002636887345247177, + "loss": 2.1447, + "step": 3650 + }, + { + "epoch": 2.357004519044545, + "grad_norm": 1.220514988656765, + "learning_rate": 0.0002636465785406259, 
+ "loss": 2.0664, + "step": 3651 + }, + { + "epoch": 2.357650096836669, + "grad_norm": 1.2158741934678772, + "learning_rate": 0.0002636044014739138, + "loss": 1.9318, + "step": 3652 + }, + { + "epoch": 2.358295674628793, + "grad_norm": 1.1331409154728482, + "learning_rate": 0.0002635622033324058, + "loss": 1.9895, + "step": 3653 + }, + { + "epoch": 2.358941252420917, + "grad_norm": 1.1366513343188267, + "learning_rate": 0.00026351998412393006, + "loss": 1.839, + "step": 3654 + }, + { + "epoch": 2.3595868302130407, + "grad_norm": 1.1174070406502916, + "learning_rate": 0.00026347774385631863, + "loss": 1.6796, + "step": 3655 + }, + { + "epoch": 2.3602324080051647, + "grad_norm": 1.1599484390045556, + "learning_rate": 0.00026343548253740756, + "loss": 1.5928, + "step": 3656 + }, + { + "epoch": 2.3608779857972886, + "grad_norm": 1.2568562355254964, + "learning_rate": 0.0002633932001750367, + "loss": 2.0213, + "step": 3657 + }, + { + "epoch": 2.3615235635894125, + "grad_norm": 1.3074265263622618, + "learning_rate": 0.00026335089677704995, + "loss": 1.8587, + "step": 3658 + }, + { + "epoch": 2.3621691413815364, + "grad_norm": 1.1187581576595034, + "learning_rate": 0.00026330857235129496, + "loss": 2.1525, + "step": 3659 + }, + { + "epoch": 2.3628147191736604, + "grad_norm": 1.2569850898832968, + "learning_rate": 0.0002632662269056234, + "loss": 2.203, + "step": 3660 + }, + { + "epoch": 2.3634602969657843, + "grad_norm": 1.2501611629187541, + "learning_rate": 0.00026322386044789076, + "loss": 2.0926, + "step": 3661 + }, + { + "epoch": 2.3641058747579082, + "grad_norm": 1.2133913233555107, + "learning_rate": 0.0002631814729859564, + "loss": 1.7021, + "step": 3662 + }, + { + "epoch": 2.364751452550032, + "grad_norm": 1.2809552867734684, + "learning_rate": 0.00026313906452768383, + "loss": 2.3095, + "step": 3663 + }, + { + "epoch": 2.365397030342156, + "grad_norm": 1.2246953470651725, + "learning_rate": 0.0002630966350809401, + "loss": 2.0084, + "step": 3664 + }, + { + "epoch": 2.36604260813428, + "grad_norm": 1.1460844224371673, + "learning_rate": 0.00026305418465359627, + "loss": 2.0952, + "step": 3665 + }, + { + "epoch": 2.366688185926404, + "grad_norm": 1.216149459601313, + "learning_rate": 0.0002630117132535275, + "loss": 2.1168, + "step": 3666 + }, + { + "epoch": 2.367333763718528, + "grad_norm": 1.178669244766579, + "learning_rate": 0.0002629692208886125, + "loss": 2.303, + "step": 3667 + }, + { + "epoch": 2.367979341510652, + "grad_norm": 1.1381373132806318, + "learning_rate": 0.00026292670756673423, + "loss": 1.7701, + "step": 3668 + }, + { + "epoch": 2.3686249193027757, + "grad_norm": 1.2360335413722734, + "learning_rate": 0.00026288417329577916, + "loss": 2.1547, + "step": 3669 + }, + { + "epoch": 2.3692704970949, + "grad_norm": 1.1078676562426644, + "learning_rate": 0.00026284161808363797, + "loss": 1.6157, + "step": 3670 + }, + { + "epoch": 2.369916074887024, + "grad_norm": 1.051346482942431, + "learning_rate": 0.00026279904193820505, + "loss": 1.9031, + "step": 3671 + }, + { + "epoch": 2.370561652679148, + "grad_norm": 1.2723852924952068, + "learning_rate": 0.0002627564448673787, + "loss": 2.0468, + "step": 3672 + }, + { + "epoch": 2.371207230471272, + "grad_norm": 1.0891061531095474, + "learning_rate": 0.0002627138268790612, + "loss": 1.6823, + "step": 3673 + }, + { + "epoch": 2.371852808263396, + "grad_norm": 1.25510704602249, + "learning_rate": 0.00026267118798115855, + "loss": 2.1575, + "step": 3674 + }, + { + "epoch": 2.3724983860555198, + "grad_norm": 1.2567253086272636, + 
"learning_rate": 0.0002626285281815807, + "loss": 2.0875, + "step": 3675 + }, + { + "epoch": 2.3731439638476437, + "grad_norm": 1.1452612667607003, + "learning_rate": 0.00026258584748824155, + "loss": 1.7032, + "step": 3676 + }, + { + "epoch": 2.3737895416397676, + "grad_norm": 1.2701180449067802, + "learning_rate": 0.00026254314590905877, + "loss": 2.363, + "step": 3677 + }, + { + "epoch": 2.3744351194318916, + "grad_norm": 1.2490393030995255, + "learning_rate": 0.00026250042345195393, + "loss": 2.0705, + "step": 3678 + }, + { + "epoch": 2.3750806972240155, + "grad_norm": 1.0742604565193714, + "learning_rate": 0.00026245768012485255, + "loss": 1.9589, + "step": 3679 + }, + { + "epoch": 2.3757262750161394, + "grad_norm": 1.3133185454903278, + "learning_rate": 0.00026241491593568384, + "loss": 2.1629, + "step": 3680 + }, + { + "epoch": 2.3763718528082634, + "grad_norm": 1.2297890750549934, + "learning_rate": 0.00026237213089238116, + "loss": 2.1134, + "step": 3681 + }, + { + "epoch": 2.3770174306003873, + "grad_norm": 1.3141260028474768, + "learning_rate": 0.0002623293250028815, + "loss": 2.0156, + "step": 3682 + }, + { + "epoch": 2.377663008392511, + "grad_norm": 1.2879547170330443, + "learning_rate": 0.0002622864982751258, + "loss": 2.0536, + "step": 3683 + }, + { + "epoch": 2.378308586184635, + "grad_norm": 1.3416696490154296, + "learning_rate": 0.0002622436507170588, + "loss": 2.0962, + "step": 3684 + }, + { + "epoch": 2.378954163976759, + "grad_norm": 1.106807110006748, + "learning_rate": 0.00026220078233662927, + "loss": 2.0478, + "step": 3685 + }, + { + "epoch": 2.379599741768883, + "grad_norm": 1.2365003303346962, + "learning_rate": 0.0002621578931417897, + "loss": 2.0749, + "step": 3686 + }, + { + "epoch": 2.380245319561007, + "grad_norm": 1.2808315810341147, + "learning_rate": 0.0002621149831404965, + "loss": 2.0723, + "step": 3687 + }, + { + "epoch": 2.380890897353131, + "grad_norm": 1.2095148223949, + "learning_rate": 0.0002620720523407099, + "loss": 1.9815, + "step": 3688 + }, + { + "epoch": 2.3815364751452552, + "grad_norm": 1.2316588387683671, + "learning_rate": 0.000262029100750394, + "loss": 2.3085, + "step": 3689 + }, + { + "epoch": 2.382182052937379, + "grad_norm": 1.257766614758679, + "learning_rate": 0.0002619861283775168, + "loss": 1.9339, + "step": 3690 + }, + { + "epoch": 2.382827630729503, + "grad_norm": 1.2144653497770566, + "learning_rate": 0.0002619431352300501, + "loss": 2.04, + "step": 3691 + }, + { + "epoch": 2.383473208521627, + "grad_norm": 1.18900529608665, + "learning_rate": 0.0002619001213159695, + "loss": 1.9979, + "step": 3692 + }, + { + "epoch": 2.384118786313751, + "grad_norm": 1.2682786904002827, + "learning_rate": 0.0002618570866432547, + "loss": 2.235, + "step": 3693 + }, + { + "epoch": 2.384764364105875, + "grad_norm": 1.1321342771227318, + "learning_rate": 0.0002618140312198889, + "loss": 1.8987, + "step": 3694 + }, + { + "epoch": 2.385409941897999, + "grad_norm": 1.7158020610822953, + "learning_rate": 0.00026177095505385944, + "loss": 2.1504, + "step": 3695 + }, + { + "epoch": 2.3860555196901228, + "grad_norm": 1.0838916910597827, + "learning_rate": 0.0002617278581531574, + "loss": 1.7787, + "step": 3696 + }, + { + "epoch": 2.3867010974822467, + "grad_norm": 1.1963968961614744, + "learning_rate": 0.0002616847405257776, + "loss": 2.2073, + "step": 3697 + }, + { + "epoch": 2.3873466752743706, + "grad_norm": 1.1109229882061484, + "learning_rate": 0.0002616416021797189, + "loss": 2.0231, + "step": 3698 + }, + { + "epoch": 2.3879922530664945, + 
"grad_norm": 1.087157225141398, + "learning_rate": 0.0002615984431229839, + "loss": 1.949, + "step": 3699 + }, + { + "epoch": 2.3886378308586185, + "grad_norm": 1.2879906134540513, + "learning_rate": 0.000261555263363579, + "loss": 2.334, + "step": 3700 + }, + { + "epoch": 2.3892834086507424, + "grad_norm": 1.137765603089143, + "learning_rate": 0.00026151206290951455, + "loss": 2.0532, + "step": 3701 + }, + { + "epoch": 2.3899289864428663, + "grad_norm": 1.2236961553194348, + "learning_rate": 0.00026146884176880467, + "loss": 2.0847, + "step": 3702 + }, + { + "epoch": 2.3905745642349903, + "grad_norm": 1.194729991789781, + "learning_rate": 0.0002614255999494673, + "loss": 2.0002, + "step": 3703 + }, + { + "epoch": 2.391220142027114, + "grad_norm": 1.2824857566584973, + "learning_rate": 0.0002613823374595243, + "loss": 2.134, + "step": 3704 + }, + { + "epoch": 2.391865719819238, + "grad_norm": 1.1484620154691714, + "learning_rate": 0.0002613390543070013, + "loss": 2.0209, + "step": 3705 + }, + { + "epoch": 2.392511297611362, + "grad_norm": 1.3206678954747397, + "learning_rate": 0.0002612957504999277, + "loss": 2.1919, + "step": 3706 + }, + { + "epoch": 2.393156875403486, + "grad_norm": 1.0994379994755246, + "learning_rate": 0.00026125242604633695, + "loss": 1.7439, + "step": 3707 + }, + { + "epoch": 2.39380245319561, + "grad_norm": 1.1738498439295626, + "learning_rate": 0.000261209080954266, + "loss": 2.007, + "step": 3708 + }, + { + "epoch": 2.394448030987734, + "grad_norm": 1.2777938042357282, + "learning_rate": 0.000261165715231756, + "loss": 2.246, + "step": 3709 + }, + { + "epoch": 2.3950936087798578, + "grad_norm": 1.2458642169165326, + "learning_rate": 0.0002611223288868516, + "loss": 2.0628, + "step": 3710 + }, + { + "epoch": 2.3957391865719817, + "grad_norm": 1.267566247276304, + "learning_rate": 0.00026107892192760146, + "loss": 2.1504, + "step": 3711 + }, + { + "epoch": 2.3963847643641056, + "grad_norm": 1.1171111576184627, + "learning_rate": 0.0002610354943620581, + "loss": 1.7847, + "step": 3712 + }, + { + "epoch": 2.39703034215623, + "grad_norm": 1.3616917967959845, + "learning_rate": 0.0002609920461982776, + "loss": 1.7989, + "step": 3713 + }, + { + "epoch": 2.397675919948354, + "grad_norm": 1.1875631043758694, + "learning_rate": 0.0002609485774443203, + "loss": 1.9966, + "step": 3714 + }, + { + "epoch": 2.398321497740478, + "grad_norm": 1.26483531420565, + "learning_rate": 0.00026090508810824983, + "loss": 2.2368, + "step": 3715 + }, + { + "epoch": 2.398967075532602, + "grad_norm": 1.1023904385803969, + "learning_rate": 0.00026086157819813416, + "loss": 2.0025, + "step": 3716 + }, + { + "epoch": 2.3996126533247257, + "grad_norm": 1.2690999627549557, + "learning_rate": 0.00026081804772204464, + "loss": 2.3009, + "step": 3717 + }, + { + "epoch": 2.4002582311168497, + "grad_norm": 1.2546584030771424, + "learning_rate": 0.0002607744966880568, + "loss": 1.9429, + "step": 3718 + }, + { + "epoch": 2.4009038089089736, + "grad_norm": 1.153626002607054, + "learning_rate": 0.00026073092510424964, + "loss": 2.0426, + "step": 3719 + }, + { + "epoch": 2.4015493867010975, + "grad_norm": 1.1926145500389147, + "learning_rate": 0.00026068733297870625, + "loss": 2.1616, + "step": 3720 + }, + { + "epoch": 2.4021949644932215, + "grad_norm": 1.2272588335275123, + "learning_rate": 0.0002606437203195134, + "loss": 2.1684, + "step": 3721 + }, + { + "epoch": 2.4028405422853454, + "grad_norm": 1.1239721420007114, + "learning_rate": 0.00026060008713476164, + "loss": 1.9668, + "step": 3722 + }, + { + 
"epoch": 2.4034861200774693, + "grad_norm": 1.1723765111526379, + "learning_rate": 0.0002605564334325455, + "loss": 2.0805, + "step": 3723 + }, + { + "epoch": 2.4041316978695932, + "grad_norm": 1.2715014616777025, + "learning_rate": 0.000260512759220963, + "loss": 2.1934, + "step": 3724 + }, + { + "epoch": 2.404777275661717, + "grad_norm": 1.1647403053925403, + "learning_rate": 0.0002604690645081164, + "loss": 1.7513, + "step": 3725 + }, + { + "epoch": 2.405422853453841, + "grad_norm": 1.1659160655908498, + "learning_rate": 0.00026042534930211127, + "loss": 1.929, + "step": 3726 + }, + { + "epoch": 2.406068431245965, + "grad_norm": 1.1678784489781817, + "learning_rate": 0.00026038161361105743, + "loss": 2.0457, + "step": 3727 + }, + { + "epoch": 2.406714009038089, + "grad_norm": 1.2124108400489135, + "learning_rate": 0.00026033785744306823, + "loss": 1.9096, + "step": 3728 + }, + { + "epoch": 2.407359586830213, + "grad_norm": 1.159000440920242, + "learning_rate": 0.00026029408080626093, + "loss": 2.0763, + "step": 3729 + }, + { + "epoch": 2.408005164622337, + "grad_norm": 1.2060481925275168, + "learning_rate": 0.0002602502837087564, + "loss": 2.0471, + "step": 3730 + }, + { + "epoch": 2.4086507424144608, + "grad_norm": 1.192282082329055, + "learning_rate": 0.0002602064661586797, + "loss": 2.0183, + "step": 3731 + }, + { + "epoch": 2.409296320206585, + "grad_norm": 1.1977848799265247, + "learning_rate": 0.0002601626281641593, + "loss": 1.9352, + "step": 3732 + }, + { + "epoch": 2.409941897998709, + "grad_norm": 1.0750022949103932, + "learning_rate": 0.0002601187697333276, + "loss": 1.9391, + "step": 3733 + }, + { + "epoch": 2.410587475790833, + "grad_norm": 1.2208207809522873, + "learning_rate": 0.0002600748908743208, + "loss": 2.0963, + "step": 3734 + }, + { + "epoch": 2.411233053582957, + "grad_norm": 1.1633761417678763, + "learning_rate": 0.00026003099159527894, + "loss": 2.0062, + "step": 3735 + }, + { + "epoch": 2.411878631375081, + "grad_norm": 1.059578911033838, + "learning_rate": 0.0002599870719043457, + "loss": 2.0308, + "step": 3736 + }, + { + "epoch": 2.412524209167205, + "grad_norm": 1.1851631886514917, + "learning_rate": 0.0002599431318096687, + "loss": 2.1523, + "step": 3737 + }, + { + "epoch": 2.4131697869593287, + "grad_norm": 1.1939738182121806, + "learning_rate": 0.0002598991713193993, + "loss": 2.1555, + "step": 3738 + }, + { + "epoch": 2.4138153647514526, + "grad_norm": 1.151028635839415, + "learning_rate": 0.0002598551904416926, + "loss": 1.8981, + "step": 3739 + }, + { + "epoch": 2.4144609425435766, + "grad_norm": 1.2763832832430648, + "learning_rate": 0.00025981118918470743, + "loss": 2.1147, + "step": 3740 + }, + { + "epoch": 2.4151065203357005, + "grad_norm": 1.1699699925511504, + "learning_rate": 0.00025976716755660666, + "loss": 2.085, + "step": 3741 + }, + { + "epoch": 2.4157520981278244, + "grad_norm": 1.3658677089648275, + "learning_rate": 0.0002597231255655566, + "loss": 2.273, + "step": 3742 + }, + { + "epoch": 2.4163976759199484, + "grad_norm": 1.3057702128520143, + "learning_rate": 0.0002596790632197276, + "loss": 2.1888, + "step": 3743 + }, + { + "epoch": 2.4170432537120723, + "grad_norm": 1.1093731382231313, + "learning_rate": 0.0002596349805272936, + "loss": 2.035, + "step": 3744 + }, + { + "epoch": 2.4176888315041962, + "grad_norm": 1.1995255872006934, + "learning_rate": 0.0002595908774964325, + "loss": 2.271, + "step": 3745 + }, + { + "epoch": 2.41833440929632, + "grad_norm": 1.2269081371782282, + "learning_rate": 0.00025954675413532573, + "loss": 
2.2009, + "step": 3746 + }, + { + "epoch": 2.418979987088444, + "grad_norm": 1.1371307675873834, + "learning_rate": 0.00025950261045215865, + "loss": 1.9143, + "step": 3747 + }, + { + "epoch": 2.419625564880568, + "grad_norm": 1.2408313161031597, + "learning_rate": 0.00025945844645512055, + "loss": 2.09, + "step": 3748 + }, + { + "epoch": 2.420271142672692, + "grad_norm": 1.2645011272308098, + "learning_rate": 0.0002594142621524042, + "loss": 2.1195, + "step": 3749 + }, + { + "epoch": 2.420916720464816, + "grad_norm": 1.0700847713414436, + "learning_rate": 0.00025937005755220614, + "loss": 1.7259, + "step": 3750 + }, + { + "epoch": 2.42156229825694, + "grad_norm": 1.1353930477885312, + "learning_rate": 0.00025932583266272694, + "loss": 2.0642, + "step": 3751 + }, + { + "epoch": 2.4222078760490637, + "grad_norm": 1.1778223857299313, + "learning_rate": 0.0002592815874921707, + "loss": 1.5947, + "step": 3752 + }, + { + "epoch": 2.4228534538411877, + "grad_norm": 1.0916880142243175, + "learning_rate": 0.0002592373220487453, + "loss": 2.1413, + "step": 3753 + }, + { + "epoch": 2.4234990316333116, + "grad_norm": 1.2064434030129272, + "learning_rate": 0.0002591930363406626, + "loss": 1.8984, + "step": 3754 + }, + { + "epoch": 2.4241446094254355, + "grad_norm": 1.1657000930700838, + "learning_rate": 0.00025914873037613794, + "loss": 2.0618, + "step": 3755 + }, + { + "epoch": 2.42479018721756, + "grad_norm": 1.1461074818743937, + "learning_rate": 0.00025910440416339054, + "loss": 2.1023, + "step": 3756 + }, + { + "epoch": 2.425435765009684, + "grad_norm": 1.1498719611707815, + "learning_rate": 0.0002590600577106435, + "loss": 1.8327, + "step": 3757 + }, + { + "epoch": 2.4260813428018078, + "grad_norm": 1.138202333003467, + "learning_rate": 0.00025901569102612334, + "loss": 2.1785, + "step": 3758 + }, + { + "epoch": 2.4267269205939317, + "grad_norm": 1.2079749404325266, + "learning_rate": 0.0002589713041180607, + "loss": 2.0529, + "step": 3759 + }, + { + "epoch": 2.4273724983860556, + "grad_norm": 1.2602020235602598, + "learning_rate": 0.0002589268969946897, + "loss": 2.0441, + "step": 3760 + }, + { + "epoch": 2.4280180761781796, + "grad_norm": 1.1450353362832324, + "learning_rate": 0.0002588824696642484, + "loss": 2.0368, + "step": 3761 + }, + { + "epoch": 2.4286636539703035, + "grad_norm": 1.464416787277582, + "learning_rate": 0.00025883802213497853, + "loss": 2.0866, + "step": 3762 + }, + { + "epoch": 2.4293092317624274, + "grad_norm": 1.1719195305220504, + "learning_rate": 0.0002587935544151256, + "loss": 2.1317, + "step": 3763 + }, + { + "epoch": 2.4299548095545513, + "grad_norm": 1.1717195939975524, + "learning_rate": 0.0002587490665129387, + "loss": 1.8636, + "step": 3764 + }, + { + "epoch": 2.4306003873466753, + "grad_norm": 1.1716469166654002, + "learning_rate": 0.00025870455843667094, + "loss": 1.9739, + "step": 3765 + }, + { + "epoch": 2.431245965138799, + "grad_norm": 1.2345346764941194, + "learning_rate": 0.00025866003019457895, + "loss": 1.9825, + "step": 3766 + }, + { + "epoch": 2.431891542930923, + "grad_norm": 1.2743729031047852, + "learning_rate": 0.00025861548179492326, + "loss": 2.0297, + "step": 3767 + }, + { + "epoch": 2.432537120723047, + "grad_norm": 1.2928936451423358, + "learning_rate": 0.0002585709132459679, + "loss": 2.135, + "step": 3768 + }, + { + "epoch": 2.433182698515171, + "grad_norm": 1.1543218479758195, + "learning_rate": 0.000258526324555981, + "loss": 1.8778, + "step": 3769 + }, + { + "epoch": 2.433828276307295, + "grad_norm": 1.1332633425726597, + 
"learning_rate": 0.0002584817157332341, + "loss": 2.0218, + "step": 3770 + }, + { + "epoch": 2.434473854099419, + "grad_norm": 1.3783890362873836, + "learning_rate": 0.00025843708678600257, + "loss": 2.3141, + "step": 3771 + }, + { + "epoch": 2.435119431891543, + "grad_norm": 1.186098174395566, + "learning_rate": 0.00025839243772256563, + "loss": 2.078, + "step": 3772 + }, + { + "epoch": 2.4357650096836667, + "grad_norm": 1.0884833367125426, + "learning_rate": 0.00025834776855120615, + "loss": 2.0194, + "step": 3773 + }, + { + "epoch": 2.4364105874757906, + "grad_norm": 1.16985056959979, + "learning_rate": 0.00025830307928021063, + "loss": 1.916, + "step": 3774 + }, + { + "epoch": 2.437056165267915, + "grad_norm": 1.0947247082173381, + "learning_rate": 0.0002582583699178696, + "loss": 1.7116, + "step": 3775 + }, + { + "epoch": 2.437701743060039, + "grad_norm": 1.1998855679531182, + "learning_rate": 0.0002582136404724768, + "loss": 1.7021, + "step": 3776 + }, + { + "epoch": 2.438347320852163, + "grad_norm": 1.386014116996479, + "learning_rate": 0.0002581688909523302, + "loss": 2.1817, + "step": 3777 + }, + { + "epoch": 2.438992898644287, + "grad_norm": 1.2109487303691744, + "learning_rate": 0.00025812412136573136, + "loss": 2.171, + "step": 3778 + }, + { + "epoch": 2.4396384764364107, + "grad_norm": 1.1927392649872026, + "learning_rate": 0.00025807933172098535, + "loss": 2.2193, + "step": 3779 + }, + { + "epoch": 2.4402840542285347, + "grad_norm": 1.283198751557099, + "learning_rate": 0.0002580345220264012, + "loss": 2.0231, + "step": 3780 + }, + { + "epoch": 2.4409296320206586, + "grad_norm": 1.109025153567203, + "learning_rate": 0.0002579896922902915, + "loss": 1.8947, + "step": 3781 + }, + { + "epoch": 2.4415752098127825, + "grad_norm": 1.1991142105784542, + "learning_rate": 0.00025794484252097277, + "loss": 2.0574, + "step": 3782 + }, + { + "epoch": 2.4422207876049065, + "grad_norm": 1.1692019714814332, + "learning_rate": 0.00025789997272676493, + "loss": 1.8337, + "step": 3783 + }, + { + "epoch": 2.4428663653970304, + "grad_norm": 1.3634487017690764, + "learning_rate": 0.000257855082915992, + "loss": 2.2834, + "step": 3784 + }, + { + "epoch": 2.4435119431891543, + "grad_norm": 1.1209108309793774, + "learning_rate": 0.00025781017309698133, + "loss": 1.5232, + "step": 3785 + }, + { + "epoch": 2.4441575209812783, + "grad_norm": 1.2045667010527032, + "learning_rate": 0.00025776524327806417, + "loss": 1.9725, + "step": 3786 + }, + { + "epoch": 2.444803098773402, + "grad_norm": 1.3070581169654796, + "learning_rate": 0.00025772029346757556, + "loss": 2.1168, + "step": 3787 + }, + { + "epoch": 2.445448676565526, + "grad_norm": 1.2871674547939362, + "learning_rate": 0.0002576753236738541, + "loss": 2.1655, + "step": 3788 + }, + { + "epoch": 2.44609425435765, + "grad_norm": 1.2543444262320242, + "learning_rate": 0.0002576303339052422, + "loss": 1.7995, + "step": 3789 + }, + { + "epoch": 2.446739832149774, + "grad_norm": 1.156857708706668, + "learning_rate": 0.0002575853241700859, + "loss": 2.0436, + "step": 3790 + }, + { + "epoch": 2.447385409941898, + "grad_norm": 1.0943414094864838, + "learning_rate": 0.00025754029447673494, + "loss": 2.0989, + "step": 3791 + }, + { + "epoch": 2.448030987734022, + "grad_norm": 1.1400817822489198, + "learning_rate": 0.0002574952448335428, + "loss": 2.1053, + "step": 3792 + }, + { + "epoch": 2.4486765655261458, + "grad_norm": 1.1420456195972444, + "learning_rate": 0.00025745017524886674, + "loss": 2.2692, + "step": 3793 + }, + { + "epoch": 2.4493221433182697, + 
"grad_norm": 1.1658593332135518, + "learning_rate": 0.0002574050857310676, + "loss": 2.0645, + "step": 3794 + }, + { + "epoch": 2.4499677211103936, + "grad_norm": 1.0607605125759105, + "learning_rate": 0.00025735997628850986, + "loss": 1.8892, + "step": 3795 + }, + { + "epoch": 2.4506132989025176, + "grad_norm": 1.533663821993869, + "learning_rate": 0.0002573148469295619, + "loss": 2.0477, + "step": 3796 + }, + { + "epoch": 2.4512588766946415, + "grad_norm": 1.266165336543101, + "learning_rate": 0.00025726969766259563, + "loss": 2.0291, + "step": 3797 + }, + { + "epoch": 2.4519044544867654, + "grad_norm": 1.2962451103891606, + "learning_rate": 0.0002572245284959867, + "loss": 2.2491, + "step": 3798 + }, + { + "epoch": 2.45255003227889, + "grad_norm": 1.3167781611327247, + "learning_rate": 0.0002571793394381145, + "loss": 2.0295, + "step": 3799 + }, + { + "epoch": 2.4531956100710137, + "grad_norm": 1.1991189448698822, + "learning_rate": 0.00025713413049736206, + "loss": 1.825, + "step": 3800 + }, + { + "epoch": 2.4538411878631377, + "grad_norm": 1.458599148592121, + "learning_rate": 0.0002570889016821161, + "loss": 2.2791, + "step": 3801 + }, + { + "epoch": 2.4544867656552616, + "grad_norm": 1.344768563827259, + "learning_rate": 0.00025704365300076705, + "loss": 2.3959, + "step": 3802 + }, + { + "epoch": 2.4551323434473855, + "grad_norm": 1.3167910381812964, + "learning_rate": 0.000256998384461709, + "loss": 2.1707, + "step": 3803 + }, + { + "epoch": 2.4557779212395094, + "grad_norm": 1.2323122722466193, + "learning_rate": 0.0002569530960733397, + "loss": 1.8229, + "step": 3804 + }, + { + "epoch": 2.4564234990316334, + "grad_norm": 1.290983643264246, + "learning_rate": 0.00025690778784406067, + "loss": 2.212, + "step": 3805 + }, + { + "epoch": 2.4570690768237573, + "grad_norm": 1.1161300238492533, + "learning_rate": 0.000256862459782277, + "loss": 1.9486, + "step": 3806 + }, + { + "epoch": 2.4577146546158812, + "grad_norm": 1.1911289355684203, + "learning_rate": 0.0002568171118963976, + "loss": 2.1346, + "step": 3807 + }, + { + "epoch": 2.458360232408005, + "grad_norm": 1.1936119100681843, + "learning_rate": 0.00025677174419483486, + "loss": 1.9334, + "step": 3808 + }, + { + "epoch": 2.459005810200129, + "grad_norm": 1.228406927520435, + "learning_rate": 0.00025672635668600504, + "loss": 2.4587, + "step": 3809 + }, + { + "epoch": 2.459651387992253, + "grad_norm": 1.2128254409697796, + "learning_rate": 0.00025668094937832803, + "loss": 2.0677, + "step": 3810 + }, + { + "epoch": 2.460296965784377, + "grad_norm": 1.2006781021955837, + "learning_rate": 0.00025663552228022727, + "loss": 2.027, + "step": 3811 + }, + { + "epoch": 2.460942543576501, + "grad_norm": 1.2614599184947886, + "learning_rate": 0.00025659007540013, + "loss": 2.3505, + "step": 3812 + }, + { + "epoch": 2.461588121368625, + "grad_norm": 1.3202701190886337, + "learning_rate": 0.000256544608746467, + "loss": 2.0341, + "step": 3813 + }, + { + "epoch": 2.4622336991607487, + "grad_norm": 1.2212742481559768, + "learning_rate": 0.00025649912232767293, + "loss": 1.9877, + "step": 3814 + }, + { + "epoch": 2.4628792769528727, + "grad_norm": 1.2193226330526479, + "learning_rate": 0.000256453616152186, + "loss": 1.9952, + "step": 3815 + }, + { + "epoch": 2.4635248547449966, + "grad_norm": 1.1475895596842118, + "learning_rate": 0.000256408090228448, + "loss": 1.9922, + "step": 3816 + }, + { + "epoch": 2.4641704325371205, + "grad_norm": 1.090211031703917, + "learning_rate": 0.00025636254456490445, + "loss": 1.9851, + "step": 3817 + }, + { + 
"epoch": 2.464816010329245, + "grad_norm": 1.039621183789077, + "learning_rate": 0.00025631697917000466, + "loss": 1.8927, + "step": 3818 + }, + { + "epoch": 2.465461588121369, + "grad_norm": 1.248756018072859, + "learning_rate": 0.0002562713940522013, + "loss": 2.2545, + "step": 3819 + }, + { + "epoch": 2.4661071659134928, + "grad_norm": 1.226979971550841, + "learning_rate": 0.00025622578921995106, + "loss": 1.7831, + "step": 3820 + }, + { + "epoch": 2.4667527437056167, + "grad_norm": 1.1933493745714556, + "learning_rate": 0.000256180164681714, + "loss": 2.0202, + "step": 3821 + }, + { + "epoch": 2.4673983214977406, + "grad_norm": 1.1908626085220992, + "learning_rate": 0.00025613452044595406, + "loss": 2.1184, + "step": 3822 + }, + { + "epoch": 2.4680438992898646, + "grad_norm": 1.2690257734161015, + "learning_rate": 0.00025608885652113854, + "loss": 2.1232, + "step": 3823 + }, + { + "epoch": 2.4686894770819885, + "grad_norm": 1.1748985186601502, + "learning_rate": 0.00025604317291573876, + "loss": 2.0532, + "step": 3824 + }, + { + "epoch": 2.4693350548741124, + "grad_norm": 1.1539184571737569, + "learning_rate": 0.0002559974696382294, + "loss": 1.9769, + "step": 3825 + }, + { + "epoch": 2.4699806326662364, + "grad_norm": 1.342871303824488, + "learning_rate": 0.00025595174669708885, + "loss": 2.2872, + "step": 3826 + }, + { + "epoch": 2.4706262104583603, + "grad_norm": 1.2714165039091687, + "learning_rate": 0.0002559060041007993, + "loss": 2.1191, + "step": 3827 + }, + { + "epoch": 2.471271788250484, + "grad_norm": 1.2554010355066556, + "learning_rate": 0.0002558602418578464, + "loss": 2.1975, + "step": 3828 + }, + { + "epoch": 2.471917366042608, + "grad_norm": 1.396859704573079, + "learning_rate": 0.0002558144599767196, + "loss": 1.7799, + "step": 3829 + }, + { + "epoch": 2.472562943834732, + "grad_norm": 1.1846848951527884, + "learning_rate": 0.0002557686584659118, + "loss": 2.118, + "step": 3830 + }, + { + "epoch": 2.473208521626856, + "grad_norm": 1.2180446519932906, + "learning_rate": 0.0002557228373339198, + "loss": 2.2111, + "step": 3831 + }, + { + "epoch": 2.47385409941898, + "grad_norm": 1.1561702876134368, + "learning_rate": 0.0002556769965892438, + "loss": 1.8961, + "step": 3832 + }, + { + "epoch": 2.474499677211104, + "grad_norm": 1.1565005219303661, + "learning_rate": 0.00025563113624038766, + "loss": 2.0869, + "step": 3833 + }, + { + "epoch": 2.475145255003228, + "grad_norm": 1.1309841885329357, + "learning_rate": 0.0002555852562958591, + "loss": 1.9735, + "step": 3834 + }, + { + "epoch": 2.4757908327953517, + "grad_norm": 1.1629551182080535, + "learning_rate": 0.00025553935676416925, + "loss": 2.0785, + "step": 3835 + }, + { + "epoch": 2.4764364105874757, + "grad_norm": 1.2304337546993953, + "learning_rate": 0.000255493437653833, + "loss": 2.2145, + "step": 3836 + }, + { + "epoch": 2.4770819883795996, + "grad_norm": 1.1539980301367745, + "learning_rate": 0.00025544749897336876, + "loss": 1.8662, + "step": 3837 + }, + { + "epoch": 2.4777275661717235, + "grad_norm": 1.4148230440096183, + "learning_rate": 0.00025540154073129865, + "loss": 2.3518, + "step": 3838 + }, + { + "epoch": 2.4783731439638474, + "grad_norm": 1.210414936460964, + "learning_rate": 0.00025535556293614846, + "loss": 1.7945, + "step": 3839 + }, + { + "epoch": 2.4790187217559714, + "grad_norm": 1.232992476594822, + "learning_rate": 0.00025530956559644745, + "loss": 2.2494, + "step": 3840 + }, + { + "epoch": 2.4796642995480953, + "grad_norm": 1.2100612875393266, + "learning_rate": 0.00025526354872072867, + 
"loss": 2.2398, + "step": 3841 + }, + { + "epoch": 2.4803098773402197, + "grad_norm": 1.1278903499347794, + "learning_rate": 0.00025521751231752875, + "loss": 1.7146, + "step": 3842 + }, + { + "epoch": 2.4809554551323436, + "grad_norm": 1.2140707272694495, + "learning_rate": 0.0002551714563953878, + "loss": 2.3447, + "step": 3843 + }, + { + "epoch": 2.4816010329244675, + "grad_norm": 1.0754328944929656, + "learning_rate": 0.00025512538096284984, + "loss": 1.7241, + "step": 3844 + }, + { + "epoch": 2.4822466107165915, + "grad_norm": 1.2903094219292954, + "learning_rate": 0.00025507928602846216, + "loss": 2.2006, + "step": 3845 + }, + { + "epoch": 2.4828921885087154, + "grad_norm": 1.1476919523489397, + "learning_rate": 0.000255033171600776, + "loss": 2.1904, + "step": 3846 + }, + { + "epoch": 2.4835377663008393, + "grad_norm": 1.2290384259366742, + "learning_rate": 0.000254987037688346, + "loss": 2.1213, + "step": 3847 + }, + { + "epoch": 2.4841833440929633, + "grad_norm": 1.0889415842810926, + "learning_rate": 0.0002549408842997305, + "loss": 1.5704, + "step": 3848 + }, + { + "epoch": 2.484828921885087, + "grad_norm": 1.2509888386756087, + "learning_rate": 0.0002548947114434914, + "loss": 1.993, + "step": 3849 + }, + { + "epoch": 2.485474499677211, + "grad_norm": 1.183670026570829, + "learning_rate": 0.00025484851912819427, + "loss": 2.1975, + "step": 3850 + }, + { + "epoch": 2.486120077469335, + "grad_norm": 1.2523888062026014, + "learning_rate": 0.00025480230736240825, + "loss": 2.0916, + "step": 3851 + }, + { + "epoch": 2.486765655261459, + "grad_norm": 1.1420992935761316, + "learning_rate": 0.00025475607615470616, + "loss": 2.0395, + "step": 3852 + }, + { + "epoch": 2.487411233053583, + "grad_norm": 1.1985757916421103, + "learning_rate": 0.00025470982551366425, + "loss": 2.0866, + "step": 3853 + }, + { + "epoch": 2.488056810845707, + "grad_norm": 1.1223538790836491, + "learning_rate": 0.00025466355544786264, + "loss": 2.0169, + "step": 3854 + }, + { + "epoch": 2.4887023886378308, + "grad_norm": 1.2826266862585307, + "learning_rate": 0.0002546172659658848, + "loss": 2.0704, + "step": 3855 + }, + { + "epoch": 2.4893479664299547, + "grad_norm": 1.0945537490013453, + "learning_rate": 0.0002545709570763179, + "loss": 1.7685, + "step": 3856 + }, + { + "epoch": 2.4899935442220786, + "grad_norm": 1.189351037866338, + "learning_rate": 0.0002545246287877528, + "loss": 2.1823, + "step": 3857 + }, + { + "epoch": 2.4906391220142026, + "grad_norm": 1.1336724495721886, + "learning_rate": 0.00025447828110878386, + "loss": 2.0478, + "step": 3858 + }, + { + "epoch": 2.4912846998063265, + "grad_norm": 1.1618722419841285, + "learning_rate": 0.000254431914048009, + "loss": 2.2159, + "step": 3859 + }, + { + "epoch": 2.4919302775984504, + "grad_norm": 1.1829184681270368, + "learning_rate": 0.0002543855276140298, + "loss": 2.1385, + "step": 3860 + }, + { + "epoch": 2.492575855390575, + "grad_norm": 1.238337871440705, + "learning_rate": 0.0002543391218154515, + "loss": 2.1126, + "step": 3861 + }, + { + "epoch": 2.4932214331826987, + "grad_norm": 1.1830524088670549, + "learning_rate": 0.00025429269666088283, + "loss": 2.1325, + "step": 3862 + }, + { + "epoch": 2.4938670109748227, + "grad_norm": 1.1930142445250795, + "learning_rate": 0.0002542462521589361, + "loss": 2.2781, + "step": 3863 + }, + { + "epoch": 2.4945125887669466, + "grad_norm": 1.1670733126181134, + "learning_rate": 0.00025419978831822725, + "loss": 1.9541, + "step": 3864 + }, + { + "epoch": 2.4951581665590705, + "grad_norm": 1.1215324261842896, + 
"learning_rate": 0.0002541533051473758, + "loss": 2.0675, + "step": 3865 + }, + { + "epoch": 2.4958037443511945, + "grad_norm": 1.343895712356425, + "learning_rate": 0.00025410680265500494, + "loss": 2.1117, + "step": 3866 + }, + { + "epoch": 2.4964493221433184, + "grad_norm": 1.3606788018210838, + "learning_rate": 0.00025406028084974127, + "loss": 2.1346, + "step": 3867 + }, + { + "epoch": 2.4970948999354423, + "grad_norm": 1.214573356543052, + "learning_rate": 0.0002540137397402151, + "loss": 2.1287, + "step": 3868 + }, + { + "epoch": 2.4977404777275662, + "grad_norm": 1.3243075180356574, + "learning_rate": 0.0002539671793350603, + "loss": 2.256, + "step": 3869 + }, + { + "epoch": 2.49838605551969, + "grad_norm": 1.262167763025218, + "learning_rate": 0.0002539205996429143, + "loss": 2.1006, + "step": 3870 + }, + { + "epoch": 2.499031633311814, + "grad_norm": 1.2240985239739484, + "learning_rate": 0.00025387400067241814, + "loss": 2.2197, + "step": 3871 + }, + { + "epoch": 2.499677211103938, + "grad_norm": 1.2514677278711814, + "learning_rate": 0.00025382738243221635, + "loss": 2.0916, + "step": 3872 + }, + { + "epoch": 2.500322788896062, + "grad_norm": 1.1902006924744983, + "learning_rate": 0.0002537807449309572, + "loss": 2.2277, + "step": 3873 + }, + { + "epoch": 2.500968366688186, + "grad_norm": 1.208232317523322, + "learning_rate": 0.0002537340881772923, + "loss": 2.0991, + "step": 3874 + }, + { + "epoch": 2.50161394448031, + "grad_norm": 1.2148517596427983, + "learning_rate": 0.00025368741217987703, + "loss": 2.1006, + "step": 3875 + }, + { + "epoch": 2.5022595222724338, + "grad_norm": 1.1213896449001342, + "learning_rate": 0.00025364071694737036, + "loss": 2.0711, + "step": 3876 + }, + { + "epoch": 2.5029051000645577, + "grad_norm": 1.1915989336826234, + "learning_rate": 0.0002535940024884346, + "loss": 2.1737, + "step": 3877 + }, + { + "epoch": 2.5035506778566816, + "grad_norm": 1.153810584238906, + "learning_rate": 0.0002535472688117358, + "loss": 2.0261, + "step": 3878 + }, + { + "epoch": 2.5041962556488055, + "grad_norm": 1.194629504869019, + "learning_rate": 0.00025350051592594355, + "loss": 2.0035, + "step": 3879 + }, + { + "epoch": 2.5048418334409295, + "grad_norm": 1.2590941287697484, + "learning_rate": 0.0002534537438397311, + "loss": 2.2039, + "step": 3880 + }, + { + "epoch": 2.5054874112330534, + "grad_norm": 1.1775402951553828, + "learning_rate": 0.0002534069525617749, + "loss": 2.0673, + "step": 3881 + }, + { + "epoch": 2.5061329890251773, + "grad_norm": 1.3800044087586125, + "learning_rate": 0.00025336014210075553, + "loss": 2.3179, + "step": 3882 + }, + { + "epoch": 2.5067785668173013, + "grad_norm": 1.0274745983500766, + "learning_rate": 0.00025331331246535667, + "loss": 1.6645, + "step": 3883 + }, + { + "epoch": 2.507424144609425, + "grad_norm": 1.386412240703379, + "learning_rate": 0.00025326646366426566, + "loss": 2.4724, + "step": 3884 + }, + { + "epoch": 2.508069722401549, + "grad_norm": 1.1562014740177025, + "learning_rate": 0.00025321959570617347, + "loss": 2.1003, + "step": 3885 + }, + { + "epoch": 2.5087153001936735, + "grad_norm": 1.1736770179776044, + "learning_rate": 0.00025317270859977464, + "loss": 2.0308, + "step": 3886 + }, + { + "epoch": 2.5093608779857974, + "grad_norm": 1.159083471480571, + "learning_rate": 0.0002531258023537672, + "loss": 1.9043, + "step": 3887 + }, + { + "epoch": 2.5100064557779214, + "grad_norm": 1.0615360069519029, + "learning_rate": 0.0002530788769768527, + "loss": 2.0643, + "step": 3888 + }, + { + "epoch": 
2.5106520335700453, + "grad_norm": 1.1418221502786483, + "learning_rate": 0.00025303193247773627, + "loss": 2.0463, + "step": 3889 + }, + { + "epoch": 2.5112976113621692, + "grad_norm": 1.0848181877763885, + "learning_rate": 0.0002529849688651267, + "loss": 1.8583, + "step": 3890 + }, + { + "epoch": 2.511943189154293, + "grad_norm": 1.4156788096975481, + "learning_rate": 0.0002529379861477361, + "loss": 2.3306, + "step": 3891 + }, + { + "epoch": 2.512588766946417, + "grad_norm": 1.185360742062876, + "learning_rate": 0.0002528909843342804, + "loss": 1.9956, + "step": 3892 + }, + { + "epoch": 2.513234344738541, + "grad_norm": 1.0746430502568198, + "learning_rate": 0.00025284396343347877, + "loss": 2.0074, + "step": 3893 + }, + { + "epoch": 2.513879922530665, + "grad_norm": 1.3114719207619658, + "learning_rate": 0.00025279692345405423, + "loss": 2.1521, + "step": 3894 + }, + { + "epoch": 2.514525500322789, + "grad_norm": 1.1369767348827076, + "learning_rate": 0.0002527498644047331, + "loss": 2.0401, + "step": 3895 + }, + { + "epoch": 2.515171078114913, + "grad_norm": 1.0710013015244226, + "learning_rate": 0.0002527027862942453, + "loss": 2.2975, + "step": 3896 + }, + { + "epoch": 2.5158166559070367, + "grad_norm": 1.300404736184564, + "learning_rate": 0.0002526556891313244, + "loss": 2.3028, + "step": 3897 + }, + { + "epoch": 2.5164622336991607, + "grad_norm": 1.0228637376649383, + "learning_rate": 0.0002526085729247073, + "loss": 1.8633, + "step": 3898 + }, + { + "epoch": 2.5171078114912846, + "grad_norm": 1.1372572612378433, + "learning_rate": 0.0002525614376831345, + "loss": 1.9752, + "step": 3899 + }, + { + "epoch": 2.5177533892834085, + "grad_norm": 1.1828014717192814, + "learning_rate": 0.00025251428341535034, + "loss": 2.0819, + "step": 3900 + }, + { + "epoch": 2.5183989670755325, + "grad_norm": 1.1458310228047348, + "learning_rate": 0.0002524671101301022, + "loss": 1.8843, + "step": 3901 + }, + { + "epoch": 2.5190445448676564, + "grad_norm": 1.117434194772984, + "learning_rate": 0.0002524199178361412, + "loss": 1.9874, + "step": 3902 + }, + { + "epoch": 2.5196901226597808, + "grad_norm": 1.0470549052938876, + "learning_rate": 0.0002523727065422222, + "loss": 1.7263, + "step": 3903 + }, + { + "epoch": 2.5203357004519047, + "grad_norm": 1.1546502500889542, + "learning_rate": 0.0002523254762571032, + "loss": 2.1034, + "step": 3904 + }, + { + "epoch": 2.5209812782440286, + "grad_norm": 1.2134092679419692, + "learning_rate": 0.00025227822698954596, + "loss": 2.1766, + "step": 3905 + }, + { + "epoch": 2.5216268560361526, + "grad_norm": 1.1478965827546923, + "learning_rate": 0.0002522309587483157, + "loss": 1.9775, + "step": 3906 + }, + { + "epoch": 2.5222724338282765, + "grad_norm": 1.2575844832651293, + "learning_rate": 0.0002521836715421812, + "loss": 2.4176, + "step": 3907 + }, + { + "epoch": 2.5229180116204004, + "grad_norm": 1.155358184846294, + "learning_rate": 0.0002521363653799148, + "loss": 2.1558, + "step": 3908 + }, + { + "epoch": 2.5235635894125243, + "grad_norm": 1.119772070983431, + "learning_rate": 0.0002520890402702921, + "loss": 1.8646, + "step": 3909 + }, + { + "epoch": 2.5242091672046483, + "grad_norm": 1.174936801042648, + "learning_rate": 0.00025204169622209245, + "loss": 1.722, + "step": 3910 + }, + { + "epoch": 2.524854744996772, + "grad_norm": 1.296598864815826, + "learning_rate": 0.00025199433324409883, + "loss": 1.6743, + "step": 3911 + }, + { + "epoch": 2.525500322788896, + "grad_norm": 1.1979470661484029, + "learning_rate": 0.00025194695134509733, + "loss": 
2.1101, + "step": 3912 + }, + { + "epoch": 2.52614590058102, + "grad_norm": 1.1038778722230531, + "learning_rate": 0.000251899550533878, + "loss": 1.9192, + "step": 3913 + }, + { + "epoch": 2.526791478373144, + "grad_norm": 1.2833673016962728, + "learning_rate": 0.000251852130819234, + "loss": 1.9797, + "step": 3914 + }, + { + "epoch": 2.527437056165268, + "grad_norm": 1.1049862737390201, + "learning_rate": 0.0002518046922099623, + "loss": 1.7598, + "step": 3915 + }, + { + "epoch": 2.528082633957392, + "grad_norm": 1.2452714511004797, + "learning_rate": 0.00025175723471486324, + "loss": 2.0456, + "step": 3916 + }, + { + "epoch": 2.528728211749516, + "grad_norm": 1.2225161167799101, + "learning_rate": 0.0002517097583427406, + "loss": 2.0622, + "step": 3917 + }, + { + "epoch": 2.5293737895416397, + "grad_norm": 1.427574561911602, + "learning_rate": 0.00025166226310240183, + "loss": 2.1767, + "step": 3918 + }, + { + "epoch": 2.5300193673337636, + "grad_norm": 1.1876387498961023, + "learning_rate": 0.00025161474900265774, + "loss": 2.043, + "step": 3919 + }, + { + "epoch": 2.5306649451258876, + "grad_norm": 1.1363888833040077, + "learning_rate": 0.00025156721605232275, + "loss": 1.9294, + "step": 3920 + }, + { + "epoch": 2.5313105229180115, + "grad_norm": 1.7461181999560333, + "learning_rate": 0.0002515196642602146, + "loss": 2.0923, + "step": 3921 + }, + { + "epoch": 2.5319561007101354, + "grad_norm": 1.2411515442496797, + "learning_rate": 0.0002514720936351548, + "loss": 2.0168, + "step": 3922 + }, + { + "epoch": 2.5326016785022594, + "grad_norm": 1.2564693138967298, + "learning_rate": 0.000251424504185968, + "loss": 2.3162, + "step": 3923 + }, + { + "epoch": 2.5332472562943833, + "grad_norm": 1.5270029086107153, + "learning_rate": 0.00025137689592148266, + "loss": 2.1702, + "step": 3924 + }, + { + "epoch": 2.5338928340865072, + "grad_norm": 1.2817182183522624, + "learning_rate": 0.00025132926885053066, + "loss": 2.1583, + "step": 3925 + }, + { + "epoch": 2.534538411878631, + "grad_norm": 1.2104269449707499, + "learning_rate": 0.0002512816229819472, + "loss": 2.2474, + "step": 3926 + }, + { + "epoch": 2.535183989670755, + "grad_norm": 1.2977081845979659, + "learning_rate": 0.0002512339583245711, + "loss": 2.2463, + "step": 3927 + }, + { + "epoch": 2.535829567462879, + "grad_norm": 1.1979002293657433, + "learning_rate": 0.0002511862748872447, + "loss": 2.1571, + "step": 3928 + }, + { + "epoch": 2.5364751452550034, + "grad_norm": 1.3195295230480992, + "learning_rate": 0.00025113857267881375, + "loss": 2.0657, + "step": 3929 + }, + { + "epoch": 2.5371207230471273, + "grad_norm": 1.2010646331372348, + "learning_rate": 0.00025109085170812746, + "loss": 2.0161, + "step": 3930 + }, + { + "epoch": 2.5377663008392513, + "grad_norm": 1.3172487561826958, + "learning_rate": 0.0002510431119840387, + "loss": 2.2308, + "step": 3931 + }, + { + "epoch": 2.538411878631375, + "grad_norm": 1.1570144379845884, + "learning_rate": 0.0002509953535154035, + "loss": 1.7246, + "step": 3932 + }, + { + "epoch": 2.539057456423499, + "grad_norm": 1.171997539006602, + "learning_rate": 0.0002509475763110817, + "loss": 1.8557, + "step": 3933 + }, + { + "epoch": 2.539703034215623, + "grad_norm": 1.18552378495167, + "learning_rate": 0.0002508997803799364, + "loss": 2.059, + "step": 3934 + }, + { + "epoch": 2.540348612007747, + "grad_norm": 1.1963672404672028, + "learning_rate": 0.00025085196573083424, + "loss": 1.9873, + "step": 3935 + }, + { + "epoch": 2.540994189799871, + "grad_norm": 1.320707837816081, + "learning_rate": 
0.00025080413237264543, + "loss": 1.9961, + "step": 3936 + }, + { + "epoch": 2.541639767591995, + "grad_norm": 1.2121721708489168, + "learning_rate": 0.00025075628031424337, + "loss": 2.0598, + "step": 3937 + }, + { + "epoch": 2.5422853453841188, + "grad_norm": 1.2228556059205697, + "learning_rate": 0.00025070840956450523, + "loss": 2.0187, + "step": 3938 + }, + { + "epoch": 2.5429309231762427, + "grad_norm": 1.2911523155610858, + "learning_rate": 0.00025066052013231156, + "loss": 2.0752, + "step": 3939 + }, + { + "epoch": 2.5435765009683666, + "grad_norm": 1.25459962500463, + "learning_rate": 0.0002506126120265463, + "loss": 2.0365, + "step": 3940 + }, + { + "epoch": 2.5442220787604906, + "grad_norm": 1.3359681903371858, + "learning_rate": 0.00025056468525609687, + "loss": 2.1954, + "step": 3941 + }, + { + "epoch": 2.5448676565526145, + "grad_norm": 1.157084412554636, + "learning_rate": 0.0002505167398298543, + "loss": 2.024, + "step": 3942 + }, + { + "epoch": 2.5455132343447384, + "grad_norm": 1.2814056070590365, + "learning_rate": 0.0002504687757567128, + "loss": 1.7198, + "step": 3943 + }, + { + "epoch": 2.5461588121368623, + "grad_norm": 1.156197574542345, + "learning_rate": 0.0002504207930455703, + "loss": 1.839, + "step": 3944 + }, + { + "epoch": 2.5468043899289863, + "grad_norm": 1.2547411167652032, + "learning_rate": 0.0002503727917053281, + "loss": 2.1783, + "step": 3945 + }, + { + "epoch": 2.5474499677211107, + "grad_norm": 1.2186165753641236, + "learning_rate": 0.00025032477174489093, + "loss": 2.1508, + "step": 3946 + }, + { + "epoch": 2.5480955455132346, + "grad_norm": 1.1600619738790137, + "learning_rate": 0.00025027673317316694, + "loss": 2.1157, + "step": 3947 + }, + { + "epoch": 2.5487411233053585, + "grad_norm": 1.13825007091403, + "learning_rate": 0.0002502286759990679, + "loss": 1.7832, + "step": 3948 + }, + { + "epoch": 2.5493867010974824, + "grad_norm": 1.1003072796225881, + "learning_rate": 0.0002501806002315088, + "loss": 2.0242, + "step": 3949 + }, + { + "epoch": 2.5500322788896064, + "grad_norm": 1.3112834593745628, + "learning_rate": 0.00025013250587940827, + "loss": 2.0772, + "step": 3950 + }, + { + "epoch": 2.5506778566817303, + "grad_norm": 1.1425517881231535, + "learning_rate": 0.00025008439295168825, + "loss": 2.0014, + "step": 3951 + }, + { + "epoch": 2.5513234344738542, + "grad_norm": 1.0869885852380217, + "learning_rate": 0.0002500362614572742, + "loss": 2.1709, + "step": 3952 + }, + { + "epoch": 2.551969012265978, + "grad_norm": 1.1986425512877448, + "learning_rate": 0.00024998811140509503, + "loss": 1.9919, + "step": 3953 + }, + { + "epoch": 2.552614590058102, + "grad_norm": 1.01719712471157, + "learning_rate": 0.00024993994280408314, + "loss": 1.8617, + "step": 3954 + }, + { + "epoch": 2.553260167850226, + "grad_norm": 1.2053149813699935, + "learning_rate": 0.0002498917556631742, + "loss": 2.0854, + "step": 3955 + }, + { + "epoch": 2.55390574564235, + "grad_norm": 1.1291913090810772, + "learning_rate": 0.0002498435499913075, + "loss": 2.0885, + "step": 3956 + }, + { + "epoch": 2.554551323434474, + "grad_norm": 1.2147929541516433, + "learning_rate": 0.0002497953257974256, + "loss": 1.9838, + "step": 3957 + }, + { + "epoch": 2.555196901226598, + "grad_norm": 1.151170835330536, + "learning_rate": 0.00024974708309047464, + "loss": 1.691, + "step": 3958 + }, + { + "epoch": 2.5558424790187217, + "grad_norm": 1.1761517501198067, + "learning_rate": 0.00024969882187940424, + "loss": 2.0745, + "step": 3959 + }, + { + "epoch": 2.5564880568108457, + "grad_norm": 
1.262989494872269, + "learning_rate": 0.00024965054217316717, + "loss": 2.3019, + "step": 3960 + }, + { + "epoch": 2.5571336346029696, + "grad_norm": 1.2449687472075701, + "learning_rate": 0.00024960224398072, + "loss": 2.1988, + "step": 3961 + }, + { + "epoch": 2.5577792123950935, + "grad_norm": 1.1395156794123513, + "learning_rate": 0.0002495539273110224, + "loss": 1.6764, + "step": 3962 + }, + { + "epoch": 2.5584247901872175, + "grad_norm": 1.1520071047711353, + "learning_rate": 0.0002495055921730377, + "loss": 2.0617, + "step": 3963 + }, + { + "epoch": 2.5590703679793414, + "grad_norm": 1.095667271366946, + "learning_rate": 0.0002494572385757325, + "loss": 1.9032, + "step": 3964 + }, + { + "epoch": 2.5597159457714653, + "grad_norm": 1.2355600330437158, + "learning_rate": 0.00024940886652807695, + "loss": 2.2864, + "step": 3965 + }, + { + "epoch": 2.5603615235635893, + "grad_norm": 1.1236322517885104, + "learning_rate": 0.00024936047603904457, + "loss": 1.7608, + "step": 3966 + }, + { + "epoch": 2.561007101355713, + "grad_norm": 1.1606065295722845, + "learning_rate": 0.00024931206711761226, + "loss": 2.1476, + "step": 3967 + }, + { + "epoch": 2.561652679147837, + "grad_norm": 1.1419806176173188, + "learning_rate": 0.0002492636397727604, + "loss": 2.0816, + "step": 3968 + }, + { + "epoch": 2.562298256939961, + "grad_norm": 1.1971509420596864, + "learning_rate": 0.0002492151940134728, + "loss": 2.3042, + "step": 3969 + }, + { + "epoch": 2.562943834732085, + "grad_norm": 1.0905571481889167, + "learning_rate": 0.0002491667298487366, + "loss": 1.7655, + "step": 3970 + }, + { + "epoch": 2.563589412524209, + "grad_norm": 1.2469930365968858, + "learning_rate": 0.00024911824728754244, + "loss": 2.2568, + "step": 3971 + }, + { + "epoch": 2.564234990316333, + "grad_norm": 1.170938520924654, + "learning_rate": 0.00024906974633888423, + "loss": 2.1259, + "step": 3972 + }, + { + "epoch": 2.564880568108457, + "grad_norm": 1.0698517592742038, + "learning_rate": 0.0002490212270117596, + "loss": 1.7151, + "step": 3973 + }, + { + "epoch": 2.565526145900581, + "grad_norm": 1.2728193931349907, + "learning_rate": 0.0002489726893151692, + "loss": 2.0871, + "step": 3974 + }, + { + "epoch": 2.566171723692705, + "grad_norm": 1.2022539998183472, + "learning_rate": 0.0002489241332581174, + "loss": 1.9979, + "step": 3975 + }, + { + "epoch": 2.566817301484829, + "grad_norm": 1.1813166620572275, + "learning_rate": 0.0002488755588496118, + "loss": 2.0617, + "step": 3976 + }, + { + "epoch": 2.567462879276953, + "grad_norm": 1.203031201034494, + "learning_rate": 0.0002488269660986635, + "loss": 1.9783, + "step": 3977 + }, + { + "epoch": 2.568108457069077, + "grad_norm": 1.3287125335139134, + "learning_rate": 0.00024877835501428686, + "loss": 2.2596, + "step": 3978 + }, + { + "epoch": 2.568754034861201, + "grad_norm": 1.2260891467359836, + "learning_rate": 0.00024872972560549984, + "loss": 2.0305, + "step": 3979 + }, + { + "epoch": 2.5693996126533247, + "grad_norm": 1.2213276115761917, + "learning_rate": 0.0002486810778813236, + "loss": 1.9165, + "step": 3980 + }, + { + "epoch": 2.5700451904454487, + "grad_norm": 1.1844736194322087, + "learning_rate": 0.00024863241185078296, + "loss": 1.985, + "step": 3981 + }, + { + "epoch": 2.5706907682375726, + "grad_norm": 1.1647051711859597, + "learning_rate": 0.0002485837275229058, + "loss": 1.9883, + "step": 3982 + }, + { + "epoch": 2.5713363460296965, + "grad_norm": 1.1961374539789857, + "learning_rate": 0.0002485350249067237, + "loss": 1.9456, + "step": 3983 + }, + { + "epoch": 
2.5719819238218204, + "grad_norm": 1.188906297045959, + "learning_rate": 0.00024848630401127147, + "loss": 1.957, + "step": 3984 + }, + { + "epoch": 2.5726275016139444, + "grad_norm": 1.6855144531404926, + "learning_rate": 0.0002484375648455873, + "loss": 2.3239, + "step": 3985 + }, + { + "epoch": 2.5732730794060683, + "grad_norm": 1.184948776343246, + "learning_rate": 0.00024838880741871283, + "loss": 2.2335, + "step": 3986 + }, + { + "epoch": 2.5739186571981922, + "grad_norm": 1.2611382128136266, + "learning_rate": 0.000248340031739693, + "loss": 2.1562, + "step": 3987 + }, + { + "epoch": 2.574564234990316, + "grad_norm": 1.2067805629630788, + "learning_rate": 0.0002482912378175763, + "loss": 2.1608, + "step": 3988 + }, + { + "epoch": 2.5752098127824405, + "grad_norm": 1.1445671364576293, + "learning_rate": 0.00024824242566141456, + "loss": 2.115, + "step": 3989 + }, + { + "epoch": 2.5758553905745645, + "grad_norm": 1.1123841611742498, + "learning_rate": 0.0002481935952802628, + "loss": 1.762, + "step": 3990 + }, + { + "epoch": 2.5765009683666884, + "grad_norm": 1.2882124231876044, + "learning_rate": 0.0002481447466831796, + "loss": 1.9977, + "step": 3991 + }, + { + "epoch": 2.5771465461588123, + "grad_norm": 1.0769181615187162, + "learning_rate": 0.000248095879879227, + "loss": 1.818, + "step": 3992 + }, + { + "epoch": 2.5777921239509363, + "grad_norm": 1.24817004159552, + "learning_rate": 0.0002480469948774701, + "loss": 2.0737, + "step": 3993 + }, + { + "epoch": 2.57843770174306, + "grad_norm": 1.1340672301449344, + "learning_rate": 0.00024799809168697776, + "loss": 1.9838, + "step": 3994 + }, + { + "epoch": 2.579083279535184, + "grad_norm": 1.1492578410094731, + "learning_rate": 0.000247949170316822, + "loss": 2.1983, + "step": 3995 + }, + { + "epoch": 2.579728857327308, + "grad_norm": 1.1673948534258227, + "learning_rate": 0.0002479002307760781, + "loss": 2.2624, + "step": 3996 + }, + { + "epoch": 2.580374435119432, + "grad_norm": 1.2868600307917821, + "learning_rate": 0.00024785127307382494, + "loss": 2.1869, + "step": 3997 + }, + { + "epoch": 2.581020012911556, + "grad_norm": 1.230341202340572, + "learning_rate": 0.0002478022972191448, + "loss": 2.1563, + "step": 3998 + }, + { + "epoch": 2.58166559070368, + "grad_norm": 1.1716925257103716, + "learning_rate": 0.000247753303221123, + "loss": 1.7701, + "step": 3999 + }, + { + "epoch": 2.5823111684958038, + "grad_norm": 1.1950138491256548, + "learning_rate": 0.00024770429108884857, + "loss": 2.0574, + "step": 4000 + }, + { + "epoch": 2.5829567462879277, + "grad_norm": 1.2498222020693017, + "learning_rate": 0.0002476552608314137, + "loss": 1.8196, + "step": 4001 + }, + { + "epoch": 2.5836023240800516, + "grad_norm": 1.1679320068531345, + "learning_rate": 0.0002476062124579141, + "loss": 2.1243, + "step": 4002 + }, + { + "epoch": 2.5842479018721756, + "grad_norm": 1.3961126903704524, + "learning_rate": 0.0002475571459774487, + "loss": 2.1041, + "step": 4003 + }, + { + "epoch": 2.5848934796642995, + "grad_norm": 1.2334305241230878, + "learning_rate": 0.0002475080613991198, + "loss": 2.2411, + "step": 4004 + }, + { + "epoch": 2.5855390574564234, + "grad_norm": 1.1826164085394701, + "learning_rate": 0.00024745895873203315, + "loss": 2.0447, + "step": 4005 + }, + { + "epoch": 2.5861846352485474, + "grad_norm": 1.2518780764158897, + "learning_rate": 0.0002474098379852978, + "loss": 2.1815, + "step": 4006 + }, + { + "epoch": 2.5868302130406713, + "grad_norm": 1.2134142861039916, + "learning_rate": 0.00024736069916802617, + "loss": 2.0024, + 
"step": 4007 + }, + { + "epoch": 2.587475790832795, + "grad_norm": 1.172760571120846, + "learning_rate": 0.000247311542289334, + "loss": 2.1606, + "step": 4008 + }, + { + "epoch": 2.588121368624919, + "grad_norm": 1.2105491972675086, + "learning_rate": 0.0002472623673583404, + "loss": 1.9738, + "step": 4009 + }, + { + "epoch": 2.588766946417043, + "grad_norm": 1.2495928068855433, + "learning_rate": 0.00024721317438416784, + "loss": 1.9825, + "step": 4010 + }, + { + "epoch": 2.589412524209167, + "grad_norm": 1.1209844758884961, + "learning_rate": 0.0002471639633759421, + "loss": 1.6073, + "step": 4011 + }, + { + "epoch": 2.590058102001291, + "grad_norm": 1.1495679750369052, + "learning_rate": 0.00024711473434279235, + "loss": 1.7072, + "step": 4012 + }, + { + "epoch": 2.590703679793415, + "grad_norm": 1.0616578618502692, + "learning_rate": 0.00024706548729385114, + "loss": 1.6494, + "step": 4013 + }, + { + "epoch": 2.591349257585539, + "grad_norm": 1.1566182673066536, + "learning_rate": 0.00024701622223825424, + "loss": 1.8471, + "step": 4014 + }, + { + "epoch": 2.5919948353776627, + "grad_norm": 1.2573430292023924, + "learning_rate": 0.00024696693918514084, + "loss": 2.0664, + "step": 4015 + }, + { + "epoch": 2.592640413169787, + "grad_norm": 1.0013897124111768, + "learning_rate": 0.00024691763814365346, + "loss": 1.7013, + "step": 4016 + }, + { + "epoch": 2.593285990961911, + "grad_norm": 1.1680109447324072, + "learning_rate": 0.000246868319122938, + "loss": 1.9711, + "step": 4017 + }, + { + "epoch": 2.593931568754035, + "grad_norm": 1.172649480662834, + "learning_rate": 0.00024681898213214356, + "loss": 1.9491, + "step": 4018 + }, + { + "epoch": 2.594577146546159, + "grad_norm": 1.2922842005357573, + "learning_rate": 0.00024676962718042276, + "loss": 2.0969, + "step": 4019 + }, + { + "epoch": 2.595222724338283, + "grad_norm": 1.1192067786412783, + "learning_rate": 0.0002467202542769314, + "loss": 2.0138, + "step": 4020 + }, + { + "epoch": 2.5958683021304068, + "grad_norm": 1.297001198683676, + "learning_rate": 0.0002466708634308287, + "loss": 2.1453, + "step": 4021 + }, + { + "epoch": 2.5965138799225307, + "grad_norm": 1.1114180566687362, + "learning_rate": 0.00024662145465127717, + "loss": 1.9395, + "step": 4022 + }, + { + "epoch": 2.5971594577146546, + "grad_norm": 1.1245826770245244, + "learning_rate": 0.00024657202794744256, + "loss": 2.2311, + "step": 4023 + }, + { + "epoch": 2.5978050355067785, + "grad_norm": 1.1243276935142639, + "learning_rate": 0.0002465225833284942, + "loss": 1.9392, + "step": 4024 + }, + { + "epoch": 2.5984506132989025, + "grad_norm": 1.1709392871362738, + "learning_rate": 0.00024647312080360446, + "loss": 2.2183, + "step": 4025 + }, + { + "epoch": 2.5990961910910264, + "grad_norm": 1.3828781468391584, + "learning_rate": 0.0002464236403819492, + "loss": 2.1909, + "step": 4026 + }, + { + "epoch": 2.5997417688831503, + "grad_norm": 1.2210967975896836, + "learning_rate": 0.0002463741420727075, + "loss": 2.0413, + "step": 4027 + }, + { + "epoch": 2.6003873466752743, + "grad_norm": 1.1807780931427254, + "learning_rate": 0.00024632462588506184, + "loss": 2.0772, + "step": 4028 + }, + { + "epoch": 2.601032924467398, + "grad_norm": 1.2056007494276026, + "learning_rate": 0.00024627509182819805, + "loss": 1.9007, + "step": 4029 + }, + { + "epoch": 2.601678502259522, + "grad_norm": 1.0389851470913223, + "learning_rate": 0.0002462255399113051, + "loss": 1.6698, + "step": 4030 + }, + { + "epoch": 2.602324080051646, + "grad_norm": 1.2653442624559192, + "learning_rate": 
0.0002461759701435755, + "loss": 2.1587, + "step": 4031 + }, + { + "epoch": 2.6029696578437704, + "grad_norm": 1.1259926429329319, + "learning_rate": 0.00024612638253420483, + "loss": 1.7165, + "step": 4032 + }, + { + "epoch": 2.6036152356358944, + "grad_norm": 1.2126909553918142, + "learning_rate": 0.0002460767770923922, + "loss": 2.0318, + "step": 4033 + }, + { + "epoch": 2.6042608134280183, + "grad_norm": 1.1599899616515366, + "learning_rate": 0.00024602715382734, + "loss": 2.0358, + "step": 4034 + }, + { + "epoch": 2.6049063912201422, + "grad_norm": 1.3605152759102845, + "learning_rate": 0.0002459775127482536, + "loss": 2.3707, + "step": 4035 + }, + { + "epoch": 2.605551969012266, + "grad_norm": 1.230687701250411, + "learning_rate": 0.0002459278538643422, + "loss": 2.1258, + "step": 4036 + }, + { + "epoch": 2.60619754680439, + "grad_norm": 1.1262554949997219, + "learning_rate": 0.00024587817718481797, + "loss": 2.0496, + "step": 4037 + }, + { + "epoch": 2.606843124596514, + "grad_norm": 1.1861883910723747, + "learning_rate": 0.0002458284827188964, + "loss": 2.1734, + "step": 4038 + }, + { + "epoch": 2.607488702388638, + "grad_norm": 1.2792477368088593, + "learning_rate": 0.0002457787704757963, + "loss": 2.0748, + "step": 4039 + }, + { + "epoch": 2.608134280180762, + "grad_norm": 1.0819126364459724, + "learning_rate": 0.00024572904046474, + "loss": 2.0256, + "step": 4040 + }, + { + "epoch": 2.608779857972886, + "grad_norm": 1.0692265935442238, + "learning_rate": 0.00024567929269495264, + "loss": 1.9208, + "step": 4041 + }, + { + "epoch": 2.6094254357650097, + "grad_norm": 1.1283378593253155, + "learning_rate": 0.0002456295271756632, + "loss": 1.8798, + "step": 4042 + }, + { + "epoch": 2.6100710135571337, + "grad_norm": 1.1482442578105876, + "learning_rate": 0.0002455797439161036, + "loss": 2.0372, + "step": 4043 + }, + { + "epoch": 2.6107165913492576, + "grad_norm": 1.1697876500464028, + "learning_rate": 0.000245529942925509, + "loss": 2.0217, + "step": 4044 + }, + { + "epoch": 2.6113621691413815, + "grad_norm": 1.3652341732041826, + "learning_rate": 0.0002454801242131183, + "loss": 2.2107, + "step": 4045 + }, + { + "epoch": 2.6120077469335055, + "grad_norm": 1.244416356652788, + "learning_rate": 0.00024543028778817327, + "loss": 2.1831, + "step": 4046 + }, + { + "epoch": 2.6126533247256294, + "grad_norm": 1.3120439186431174, + "learning_rate": 0.00024538043365991897, + "loss": 2.3589, + "step": 4047 + }, + { + "epoch": 2.6132989025177533, + "grad_norm": 1.097864564834298, + "learning_rate": 0.000245330561837604, + "loss": 1.9493, + "step": 4048 + }, + { + "epoch": 2.6139444803098772, + "grad_norm": 1.1442379465219656, + "learning_rate": 0.00024528067233048004, + "loss": 1.888, + "step": 4049 + }, + { + "epoch": 2.614590058102001, + "grad_norm": 1.3565160965651606, + "learning_rate": 0.0002452307651478021, + "loss": 2.1561, + "step": 4050 + }, + { + "epoch": 2.615235635894125, + "grad_norm": 1.2113956394946375, + "learning_rate": 0.00024518084029882847, + "loss": 2.1436, + "step": 4051 + }, + { + "epoch": 2.615881213686249, + "grad_norm": 1.14180506758021, + "learning_rate": 0.0002451308977928208, + "loss": 2.1764, + "step": 4052 + }, + { + "epoch": 2.616526791478373, + "grad_norm": 1.1254755092019637, + "learning_rate": 0.0002450809376390439, + "loss": 2.0958, + "step": 4053 + }, + { + "epoch": 2.617172369270497, + "grad_norm": 1.1428839562778383, + "learning_rate": 0.0002450309598467659, + "loss": 1.8048, + "step": 4054 + }, + { + "epoch": 2.617817947062621, + "grad_norm": 
1.1404241272685227, + "learning_rate": 0.0002449809644252582, + "loss": 2.0065, + "step": 4055 + }, + { + "epoch": 2.6184635248547448, + "grad_norm": 1.1650918451113959, + "learning_rate": 0.0002449309513837954, + "loss": 1.9269, + "step": 4056 + }, + { + "epoch": 2.6191091026468687, + "grad_norm": 1.2215872622167878, + "learning_rate": 0.0002448809207316555, + "loss": 2.3432, + "step": 4057 + }, + { + "epoch": 2.6197546804389926, + "grad_norm": 4.191406534433176, + "learning_rate": 0.00024483087247811983, + "loss": 2.0314, + "step": 4058 + }, + { + "epoch": 2.620400258231117, + "grad_norm": 1.1565008008362228, + "learning_rate": 0.0002447808066324727, + "loss": 2.0256, + "step": 4059 + }, + { + "epoch": 2.621045836023241, + "grad_norm": 1.1135950111980706, + "learning_rate": 0.0002447307232040018, + "loss": 1.9868, + "step": 4060 + }, + { + "epoch": 2.621691413815365, + "grad_norm": 1.3090163143239713, + "learning_rate": 0.00024468062220199825, + "loss": 2.1893, + "step": 4061 + }, + { + "epoch": 2.622336991607489, + "grad_norm": 1.344356191596588, + "learning_rate": 0.00024463050363575633, + "loss": 2.2838, + "step": 4062 + }, + { + "epoch": 2.6229825693996127, + "grad_norm": 1.158196391906234, + "learning_rate": 0.0002445803675145734, + "loss": 1.9954, + "step": 4063 + }, + { + "epoch": 2.6236281471917366, + "grad_norm": 1.106751513418233, + "learning_rate": 0.0002445302138477504, + "loss": 2.0045, + "step": 4064 + }, + { + "epoch": 2.6242737249838606, + "grad_norm": 1.3107106264319694, + "learning_rate": 0.00024448004264459124, + "loss": 2.2307, + "step": 4065 + }, + { + "epoch": 2.6249193027759845, + "grad_norm": 1.0825291297580546, + "learning_rate": 0.0002444298539144032, + "loss": 1.7122, + "step": 4066 + }, + { + "epoch": 2.6255648805681084, + "grad_norm": 1.2548769519395184, + "learning_rate": 0.00024437964766649696, + "loss": 2.0862, + "step": 4067 + }, + { + "epoch": 2.6262104583602324, + "grad_norm": 1.25533800242059, + "learning_rate": 0.0002443294239101861, + "loss": 2.0178, + "step": 4068 + }, + { + "epoch": 2.6268560361523563, + "grad_norm": 1.289057162223633, + "learning_rate": 0.00024427918265478775, + "loss": 2.1411, + "step": 4069 + }, + { + "epoch": 2.6275016139444802, + "grad_norm": 1.2655037501685276, + "learning_rate": 0.0002442289239096222, + "loss": 2.3757, + "step": 4070 + }, + { + "epoch": 2.628147191736604, + "grad_norm": 1.1438394753801782, + "learning_rate": 0.0002441786476840129, + "loss": 1.9413, + "step": 4071 + }, + { + "epoch": 2.628792769528728, + "grad_norm": 1.1842902208967354, + "learning_rate": 0.00024412835398728664, + "loss": 2.2519, + "step": 4072 + }, + { + "epoch": 2.629438347320852, + "grad_norm": 1.1805729522390667, + "learning_rate": 0.00024407804282877346, + "loss": 2.1306, + "step": 4073 + }, + { + "epoch": 2.630083925112976, + "grad_norm": 1.2151049145855075, + "learning_rate": 0.00024402771421780655, + "loss": 1.6945, + "step": 4074 + }, + { + "epoch": 2.6307295029051003, + "grad_norm": 1.568083981671778, + "learning_rate": 0.00024397736816372248, + "loss": 2.3713, + "step": 4075 + }, + { + "epoch": 2.6313750806972243, + "grad_norm": 1.276617066440064, + "learning_rate": 0.00024392700467586084, + "loss": 2.0701, + "step": 4076 + }, + { + "epoch": 2.632020658489348, + "grad_norm": 1.2152300445835746, + "learning_rate": 0.00024387662376356467, + "loss": 2.0631, + "step": 4077 + }, + { + "epoch": 2.632666236281472, + "grad_norm": 1.166909731528117, + "learning_rate": 0.00024382622543618013, + "loss": 2.0759, + "step": 4078 + }, + { + 
"epoch": 2.633311814073596, + "grad_norm": 1.2430568910903985, + "learning_rate": 0.00024377580970305661, + "loss": 2.2241, + "step": 4079 + }, + { + "epoch": 2.63395739186572, + "grad_norm": 1.123712247350615, + "learning_rate": 0.00024372537657354676, + "loss": 1.9572, + "step": 4080 + }, + { + "epoch": 2.634602969657844, + "grad_norm": 1.1589710623796996, + "learning_rate": 0.00024367492605700648, + "loss": 2.2328, + "step": 4081 + }, + { + "epoch": 2.635248547449968, + "grad_norm": 1.1906209813411965, + "learning_rate": 0.00024362445816279485, + "loss": 2.2078, + "step": 4082 + }, + { + "epoch": 2.6358941252420918, + "grad_norm": 1.3355535012537811, + "learning_rate": 0.0002435739729002742, + "loss": 2.2458, + "step": 4083 + }, + { + "epoch": 2.6365397030342157, + "grad_norm": 1.2107611930261657, + "learning_rate": 0.00024352347027881003, + "loss": 2.1716, + "step": 4084 + }, + { + "epoch": 2.6371852808263396, + "grad_norm": 1.2937097406939597, + "learning_rate": 0.0002434729503077711, + "loss": 2.008, + "step": 4085 + }, + { + "epoch": 2.6378308586184636, + "grad_norm": 1.1890498857037908, + "learning_rate": 0.00024342241299652945, + "loss": 2.1163, + "step": 4086 + }, + { + "epoch": 2.6384764364105875, + "grad_norm": 1.2867587544016552, + "learning_rate": 0.0002433718583544602, + "loss": 1.9382, + "step": 4087 + }, + { + "epoch": 2.6391220142027114, + "grad_norm": 1.139483295408715, + "learning_rate": 0.0002433212863909418, + "loss": 2.0731, + "step": 4088 + }, + { + "epoch": 2.6397675919948353, + "grad_norm": 1.2647645594024215, + "learning_rate": 0.00024327069711535589, + "loss": 1.9387, + "step": 4089 + }, + { + "epoch": 2.6404131697869593, + "grad_norm": 1.163299585446328, + "learning_rate": 0.0002432200905370873, + "loss": 1.8196, + "step": 4090 + }, + { + "epoch": 2.641058747579083, + "grad_norm": 1.2600029844188252, + "learning_rate": 0.00024316946666552407, + "loss": 1.8027, + "step": 4091 + }, + { + "epoch": 2.641704325371207, + "grad_norm": 1.3405973850614028, + "learning_rate": 0.0002431188255100574, + "loss": 2.0363, + "step": 4092 + }, + { + "epoch": 2.642349903163331, + "grad_norm": 1.146689673915452, + "learning_rate": 0.00024306816708008182, + "loss": 1.9819, + "step": 4093 + }, + { + "epoch": 2.642995480955455, + "grad_norm": 1.265598520710304, + "learning_rate": 0.000243017491384995, + "loss": 2.1479, + "step": 4094 + }, + { + "epoch": 2.643641058747579, + "grad_norm": 1.3146519368220362, + "learning_rate": 0.00024296679843419772, + "loss": 1.9724, + "step": 4095 + }, + { + "epoch": 2.644286636539703, + "grad_norm": 1.2625820450240886, + "learning_rate": 0.00024291608823709415, + "loss": 2.3631, + "step": 4096 + }, + { + "epoch": 2.644932214331827, + "grad_norm": 1.1217494188517805, + "learning_rate": 0.00024286536080309152, + "loss": 2.0155, + "step": 4097 + }, + { + "epoch": 2.6455777921239507, + "grad_norm": 1.2514491140171151, + "learning_rate": 0.00024281461614160033, + "loss": 2.1105, + "step": 4098 + }, + { + "epoch": 2.6462233699160747, + "grad_norm": 1.2042551914787594, + "learning_rate": 0.00024276385426203416, + "loss": 1.8789, + "step": 4099 + }, + { + "epoch": 2.6468689477081986, + "grad_norm": 1.2599223100961907, + "learning_rate": 0.0002427130751738099, + "loss": 1.7922, + "step": 4100 + }, + { + "epoch": 2.6475145255003225, + "grad_norm": 1.1279023418010357, + "learning_rate": 0.00024266227888634762, + "loss": 1.7435, + "step": 4101 + }, + { + "epoch": 2.648160103292447, + "grad_norm": 1.1810819293906056, + "learning_rate": 0.00024261146540907052, + 
"loss": 2.2582, + "step": 4102 + }, + { + "epoch": 2.648805681084571, + "grad_norm": 1.1800140208116645, + "learning_rate": 0.00024256063475140512, + "loss": 2.1497, + "step": 4103 + }, + { + "epoch": 2.6494512588766947, + "grad_norm": 1.207703143563203, + "learning_rate": 0.00024250978692278094, + "loss": 2.3023, + "step": 4104 + }, + { + "epoch": 2.6500968366688187, + "grad_norm": 1.1215676530124838, + "learning_rate": 0.00024245892193263077, + "loss": 1.911, + "step": 4105 + }, + { + "epoch": 2.6507424144609426, + "grad_norm": 1.1811772487798617, + "learning_rate": 0.00024240803979039068, + "loss": 2.2873, + "step": 4106 + }, + { + "epoch": 2.6513879922530665, + "grad_norm": 1.2652646549351798, + "learning_rate": 0.00024235714050549977, + "loss": 2.3081, + "step": 4107 + }, + { + "epoch": 2.6520335700451905, + "grad_norm": 1.2431700768255227, + "learning_rate": 0.0002423062240874004, + "loss": 2.1598, + "step": 4108 + }, + { + "epoch": 2.6526791478373144, + "grad_norm": 1.133802269164375, + "learning_rate": 0.00024225529054553813, + "loss": 2.0326, + "step": 4109 + }, + { + "epoch": 2.6533247256294383, + "grad_norm": 1.0821365311793403, + "learning_rate": 0.0002422043398893616, + "loss": 1.9415, + "step": 4110 + }, + { + "epoch": 2.6539703034215623, + "grad_norm": 1.2106943772073957, + "learning_rate": 0.00024215337212832273, + "loss": 1.9217, + "step": 4111 + }, + { + "epoch": 2.654615881213686, + "grad_norm": 1.2134815104515757, + "learning_rate": 0.00024210238727187655, + "loss": 2.2907, + "step": 4112 + }, + { + "epoch": 2.65526145900581, + "grad_norm": 1.345203980164066, + "learning_rate": 0.00024205138532948129, + "loss": 2.1668, + "step": 4113 + }, + { + "epoch": 2.655907036797934, + "grad_norm": 1.2698087537263005, + "learning_rate": 0.00024200036631059834, + "loss": 2.2817, + "step": 4114 + }, + { + "epoch": 2.656552614590058, + "grad_norm": 1.2394294207469996, + "learning_rate": 0.00024194933022469222, + "loss": 2.3826, + "step": 4115 + }, + { + "epoch": 2.657198192382182, + "grad_norm": 1.2236074382032576, + "learning_rate": 0.00024189827708123073, + "loss": 2.1617, + "step": 4116 + }, + { + "epoch": 2.657843770174306, + "grad_norm": 1.1747544443085023, + "learning_rate": 0.0002418472068896847, + "loss": 2.112, + "step": 4117 + }, + { + "epoch": 2.65848934796643, + "grad_norm": 1.0872374191948657, + "learning_rate": 0.00024179611965952815, + "loss": 1.6603, + "step": 4118 + }, + { + "epoch": 2.659134925758554, + "grad_norm": 1.1506853886792092, + "learning_rate": 0.00024174501540023837, + "loss": 2.0397, + "step": 4119 + }, + { + "epoch": 2.659780503550678, + "grad_norm": 1.2090869792429464, + "learning_rate": 0.00024169389412129568, + "loss": 2.2556, + "step": 4120 + }, + { + "epoch": 2.660426081342802, + "grad_norm": 1.1293361309038745, + "learning_rate": 0.00024164275583218365, + "loss": 1.732, + "step": 4121 + }, + { + "epoch": 2.661071659134926, + "grad_norm": 1.0921666754065489, + "learning_rate": 0.00024159160054238897, + "loss": 2.0493, + "step": 4122 + }, + { + "epoch": 2.66171723692705, + "grad_norm": 1.2840819589076469, + "learning_rate": 0.0002415404282614014, + "loss": 2.0816, + "step": 4123 + }, + { + "epoch": 2.662362814719174, + "grad_norm": 1.2243535490216295, + "learning_rate": 0.00024148923899871397, + "loss": 1.9206, + "step": 4124 + }, + { + "epoch": 2.6630083925112977, + "grad_norm": 1.150563971999031, + "learning_rate": 0.00024143803276382284, + "loss": 1.8606, + "step": 4125 + }, + { + "epoch": 2.6636539703034217, + "grad_norm": 1.184181582754721, + 
"learning_rate": 0.0002413868095662273, + "loss": 1.9886, + "step": 4126 + }, + { + "epoch": 2.6642995480955456, + "grad_norm": 1.1549750008208746, + "learning_rate": 0.00024133556941542976, + "loss": 1.7219, + "step": 4127 + }, + { + "epoch": 2.6649451258876695, + "grad_norm": 1.1759224582929368, + "learning_rate": 0.0002412843123209358, + "loss": 2.1166, + "step": 4128 + }, + { + "epoch": 2.6655907036797934, + "grad_norm": 1.3212392512059725, + "learning_rate": 0.00024123303829225415, + "loss": 2.0222, + "step": 4129 + }, + { + "epoch": 2.6662362814719174, + "grad_norm": 1.242148425858899, + "learning_rate": 0.00024118174733889664, + "loss": 2.3461, + "step": 4130 + }, + { + "epoch": 2.6668818592640413, + "grad_norm": 1.1477844861228883, + "learning_rate": 0.00024113043947037835, + "loss": 2.1408, + "step": 4131 + }, + { + "epoch": 2.6675274370561652, + "grad_norm": 1.1056929700330294, + "learning_rate": 0.0002410791146962174, + "loss": 2.019, + "step": 4132 + }, + { + "epoch": 2.668173014848289, + "grad_norm": 1.1238135290937106, + "learning_rate": 0.00024102777302593503, + "loss": 1.7118, + "step": 4133 + }, + { + "epoch": 2.668818592640413, + "grad_norm": 1.1679796957602637, + "learning_rate": 0.00024097641446905568, + "loss": 1.7024, + "step": 4134 + }, + { + "epoch": 2.669464170432537, + "grad_norm": 1.1326844208516285, + "learning_rate": 0.00024092503903510687, + "loss": 1.7256, + "step": 4135 + }, + { + "epoch": 2.670109748224661, + "grad_norm": 1.2291640172480958, + "learning_rate": 0.0002408736467336193, + "loss": 2.2178, + "step": 4136 + }, + { + "epoch": 2.670755326016785, + "grad_norm": 1.0341972170066673, + "learning_rate": 0.0002408222375741268, + "loss": 1.71, + "step": 4137 + }, + { + "epoch": 2.671400903808909, + "grad_norm": 1.333427059401983, + "learning_rate": 0.00024077081156616625, + "loss": 2.1101, + "step": 4138 + }, + { + "epoch": 2.6720464816010328, + "grad_norm": 1.1909066428079282, + "learning_rate": 0.00024071936871927774, + "loss": 2.0432, + "step": 4139 + }, + { + "epoch": 2.6726920593931567, + "grad_norm": 1.0763278655748791, + "learning_rate": 0.0002406679090430045, + "loss": 1.8444, + "step": 4140 + }, + { + "epoch": 2.6733376371852806, + "grad_norm": 1.1762087977673619, + "learning_rate": 0.0002406164325468927, + "loss": 2.1591, + "step": 4141 + }, + { + "epoch": 2.6739832149774045, + "grad_norm": 1.228522849528347, + "learning_rate": 0.00024056493924049188, + "loss": 2.1619, + "step": 4142 + }, + { + "epoch": 2.6746287927695285, + "grad_norm": 1.2120551731850737, + "learning_rate": 0.00024051342913335454, + "loss": 2.23, + "step": 4143 + }, + { + "epoch": 2.6752743705616524, + "grad_norm": 1.2546814661054564, + "learning_rate": 0.0002404619022350363, + "loss": 2.227, + "step": 4144 + }, + { + "epoch": 2.675919948353777, + "grad_norm": 1.2296269749794793, + "learning_rate": 0.00024041035855509605, + "loss": 2.2156, + "step": 4145 + }, + { + "epoch": 2.6765655261459007, + "grad_norm": 1.2111511642968227, + "learning_rate": 0.00024035879810309555, + "loss": 2.164, + "step": 4146 + }, + { + "epoch": 2.6772111039380246, + "grad_norm": 1.3361672788170942, + "learning_rate": 0.00024030722088859996, + "loss": 1.9987, + "step": 4147 + }, + { + "epoch": 2.6778566817301486, + "grad_norm": 1.2803270728856602, + "learning_rate": 0.00024025562692117717, + "loss": 2.1133, + "step": 4148 + }, + { + "epoch": 2.6785022595222725, + "grad_norm": 1.2074878492206351, + "learning_rate": 0.00024020401621039855, + "loss": 2.1214, + "step": 4149 + }, + { + "epoch": 
2.6791478373143964, + "grad_norm": 1.194056599750226, + "learning_rate": 0.00024015238876583844, + "loss": 2.1153, + "step": 4150 + }, + { + "epoch": 2.6797934151065204, + "grad_norm": 1.105812083352454, + "learning_rate": 0.0002401007445970742, + "loss": 2.0235, + "step": 4151 + }, + { + "epoch": 2.6804389928986443, + "grad_norm": 1.1722379630071886, + "learning_rate": 0.00024004908371368628, + "loss": 2.023, + "step": 4152 + }, + { + "epoch": 2.681084570690768, + "grad_norm": 1.2132342091279547, + "learning_rate": 0.0002399974061252585, + "loss": 2.2941, + "step": 4153 + }, + { + "epoch": 2.681730148482892, + "grad_norm": 1.1225868344926038, + "learning_rate": 0.00023994571184137746, + "loss": 1.9608, + "step": 4154 + }, + { + "epoch": 2.682375726275016, + "grad_norm": 1.1691111245807937, + "learning_rate": 0.000239894000871633, + "loss": 2.1446, + "step": 4155 + }, + { + "epoch": 2.68302130406714, + "grad_norm": 1.0330388236419714, + "learning_rate": 0.00023984227322561809, + "loss": 2.0827, + "step": 4156 + }, + { + "epoch": 2.683666881859264, + "grad_norm": 1.2879705161393764, + "learning_rate": 0.00023979052891292868, + "loss": 2.2082, + "step": 4157 + }, + { + "epoch": 2.684312459651388, + "grad_norm": 1.1574397011644415, + "learning_rate": 0.00023973876794316392, + "loss": 2.1255, + "step": 4158 + }, + { + "epoch": 2.684958037443512, + "grad_norm": 1.1301190417625786, + "learning_rate": 0.00023968699032592608, + "loss": 2.0613, + "step": 4159 + }, + { + "epoch": 2.6856036152356357, + "grad_norm": 1.0199850612099033, + "learning_rate": 0.00023963519607082026, + "loss": 1.5587, + "step": 4160 + }, + { + "epoch": 2.68624919302776, + "grad_norm": 1.3099096322138373, + "learning_rate": 0.00023958338518745498, + "loss": 1.8695, + "step": 4161 + }, + { + "epoch": 2.686894770819884, + "grad_norm": 1.2455584822888677, + "learning_rate": 0.00023953155768544168, + "loss": 2.2985, + "step": 4162 + }, + { + "epoch": 2.687540348612008, + "grad_norm": 1.170437124050374, + "learning_rate": 0.00023947971357439482, + "loss": 1.7486, + "step": 4163 + }, + { + "epoch": 2.688185926404132, + "grad_norm": 1.1906562639136409, + "learning_rate": 0.0002394278528639321, + "loss": 2.3082, + "step": 4164 + }, + { + "epoch": 2.688831504196256, + "grad_norm": 1.188249866055845, + "learning_rate": 0.00023937597556367423, + "loss": 1.8918, + "step": 4165 + }, + { + "epoch": 2.6894770819883798, + "grad_norm": 1.171530908545607, + "learning_rate": 0.0002393240816832449, + "loss": 2.0178, + "step": 4166 + }, + { + "epoch": 2.6901226597805037, + "grad_norm": 1.032412917513623, + "learning_rate": 0.00023927217123227105, + "loss": 1.5797, + "step": 4167 + }, + { + "epoch": 2.6907682375726276, + "grad_norm": 1.144907428962246, + "learning_rate": 0.00023922024422038255, + "loss": 2.0413, + "step": 4168 + }, + { + "epoch": 2.6914138153647515, + "grad_norm": 1.1377123204251658, + "learning_rate": 0.00023916830065721248, + "loss": 1.8159, + "step": 4169 + }, + { + "epoch": 2.6920593931568755, + "grad_norm": 1.3211762894722359, + "learning_rate": 0.00023911634055239682, + "loss": 2.225, + "step": 4170 + }, + { + "epoch": 2.6927049709489994, + "grad_norm": 1.257437780892538, + "learning_rate": 0.0002390643639155747, + "loss": 2.141, + "step": 4171 + }, + { + "epoch": 2.6933505487411233, + "grad_norm": 1.2928215018905405, + "learning_rate": 0.0002390123707563884, + "loss": 2.2475, + "step": 4172 + }, + { + "epoch": 2.6939961265332473, + "grad_norm": 1.15720530098395, + "learning_rate": 0.0002389603610844832, + "loss": 1.9528, + 
"step": 4173 + }, + { + "epoch": 2.694641704325371, + "grad_norm": 1.196773250846871, + "learning_rate": 0.00023890833490950732, + "loss": 2.148, + "step": 4174 + }, + { + "epoch": 2.695287282117495, + "grad_norm": 1.1483743021809822, + "learning_rate": 0.00023885629224111227, + "loss": 1.9451, + "step": 4175 + }, + { + "epoch": 2.695932859909619, + "grad_norm": 1.293597174093288, + "learning_rate": 0.0002388042330889525, + "loss": 2.016, + "step": 4176 + }, + { + "epoch": 2.696578437701743, + "grad_norm": 1.2045329740762618, + "learning_rate": 0.00023875215746268542, + "loss": 1.9329, + "step": 4177 + }, + { + "epoch": 2.697224015493867, + "grad_norm": 1.0984122382936603, + "learning_rate": 0.0002387000653719717, + "loss": 2.0408, + "step": 4178 + }, + { + "epoch": 2.697869593285991, + "grad_norm": 1.2419072503476907, + "learning_rate": 0.00023864795682647497, + "loss": 2.2603, + "step": 4179 + }, + { + "epoch": 2.698515171078115, + "grad_norm": 1.0384353043207208, + "learning_rate": 0.0002385958318358618, + "loss": 1.6933, + "step": 4180 + }, + { + "epoch": 2.6991607488702387, + "grad_norm": 1.2171601961205962, + "learning_rate": 0.00023854369040980202, + "loss": 2.2257, + "step": 4181 + }, + { + "epoch": 2.6998063266623626, + "grad_norm": 1.003509757259361, + "learning_rate": 0.00023849153255796836, + "loss": 1.5479, + "step": 4182 + }, + { + "epoch": 2.7004519044544866, + "grad_norm": 1.290754055072564, + "learning_rate": 0.00023843935829003668, + "loss": 2.2156, + "step": 4183 + }, + { + "epoch": 2.7010974822466105, + "grad_norm": 1.1583231068394513, + "learning_rate": 0.00023838716761568577, + "loss": 2.1554, + "step": 4184 + }, + { + "epoch": 2.7017430600387344, + "grad_norm": 1.2265745011963143, + "learning_rate": 0.00023833496054459762, + "loss": 1.9942, + "step": 4185 + }, + { + "epoch": 2.7023886378308584, + "grad_norm": 1.1112439873281428, + "learning_rate": 0.0002382827370864572, + "loss": 1.7419, + "step": 4186 + }, + { + "epoch": 2.7030342156229823, + "grad_norm": 1.1034569125472522, + "learning_rate": 0.0002382304972509524, + "loss": 1.7288, + "step": 4187 + }, + { + "epoch": 2.7036797934151067, + "grad_norm": 1.08501424643897, + "learning_rate": 0.00023817824104777432, + "loss": 1.8885, + "step": 4188 + }, + { + "epoch": 2.7043253712072306, + "grad_norm": 1.1412369589174023, + "learning_rate": 0.00023812596848661708, + "loss": 2.0059, + "step": 4189 + }, + { + "epoch": 2.7049709489993545, + "grad_norm": 1.1165696870006159, + "learning_rate": 0.0002380736795771777, + "loss": 1.5384, + "step": 4190 + }, + { + "epoch": 2.7056165267914785, + "grad_norm": 1.249940501301107, + "learning_rate": 0.00023802137432915628, + "loss": 2.0771, + "step": 4191 + }, + { + "epoch": 2.7062621045836024, + "grad_norm": 1.202951860296059, + "learning_rate": 0.0002379690527522561, + "loss": 1.7846, + "step": 4192 + }, + { + "epoch": 2.7069076823757263, + "grad_norm": 1.1480666852491692, + "learning_rate": 0.00023791671485618324, + "loss": 2.0126, + "step": 4193 + }, + { + "epoch": 2.7075532601678503, + "grad_norm": 1.2765035534024998, + "learning_rate": 0.00023786436065064708, + "loss": 2.0594, + "step": 4194 + }, + { + "epoch": 2.708198837959974, + "grad_norm": 1.3057896400365996, + "learning_rate": 0.0002378119901453597, + "loss": 2.1468, + "step": 4195 + }, + { + "epoch": 2.708844415752098, + "grad_norm": 1.1503915978103554, + "learning_rate": 0.0002377596033500365, + "loss": 2.2173, + "step": 4196 + }, + { + "epoch": 2.709489993544222, + "grad_norm": 1.2235411074206692, + "learning_rate": 
0.00023770720027439568, + "loss": 1.8576, + "step": 4197 + }, + { + "epoch": 2.710135571336346, + "grad_norm": 1.1580842304162076, + "learning_rate": 0.0002376547809281586, + "loss": 2.0981, + "step": 4198 + }, + { + "epoch": 2.71078114912847, + "grad_norm": 1.1442332742598504, + "learning_rate": 0.00023760234532104962, + "loss": 1.5436, + "step": 4199 + }, + { + "epoch": 2.711426726920594, + "grad_norm": 1.179865599048504, + "learning_rate": 0.000237549893462796, + "loss": 2.0728, + "step": 4200 + }, + { + "epoch": 2.7120723047127178, + "grad_norm": 1.2754762167711087, + "learning_rate": 0.00023749742536312824, + "loss": 1.8508, + "step": 4201 + }, + { + "epoch": 2.7127178825048417, + "grad_norm": 1.1880678540302547, + "learning_rate": 0.0002374449410317796, + "loss": 1.9328, + "step": 4202 + }, + { + "epoch": 2.7133634602969656, + "grad_norm": 1.208374749359109, + "learning_rate": 0.0002373924404784865, + "loss": 2.0726, + "step": 4203 + }, + { + "epoch": 2.71400903808909, + "grad_norm": 1.2297426637680713, + "learning_rate": 0.00023733992371298835, + "loss": 2.0673, + "step": 4204 + }, + { + "epoch": 2.714654615881214, + "grad_norm": 1.2780234066635514, + "learning_rate": 0.00023728739074502754, + "loss": 1.916, + "step": 4205 + }, + { + "epoch": 2.715300193673338, + "grad_norm": 1.2039157297240681, + "learning_rate": 0.00023723484158434953, + "loss": 2.0591, + "step": 4206 + }, + { + "epoch": 2.715945771465462, + "grad_norm": 1.1648251022102152, + "learning_rate": 0.00023718227624070263, + "loss": 1.9992, + "step": 4207 + }, + { + "epoch": 2.7165913492575857, + "grad_norm": 1.183972245772888, + "learning_rate": 0.00023712969472383837, + "loss": 1.8894, + "step": 4208 + }, + { + "epoch": 2.7172369270497096, + "grad_norm": 1.2749915341414009, + "learning_rate": 0.00023707709704351112, + "loss": 2.194, + "step": 4209 + }, + { + "epoch": 2.7178825048418336, + "grad_norm": 1.1603058290438069, + "learning_rate": 0.00023702448320947828, + "loss": 2.1893, + "step": 4210 + }, + { + "epoch": 2.7185280826339575, + "grad_norm": 1.1365475056193681, + "learning_rate": 0.0002369718532315003, + "loss": 1.7418, + "step": 4211 + }, + { + "epoch": 2.7191736604260814, + "grad_norm": 1.2705737640038253, + "learning_rate": 0.00023691920711934057, + "loss": 1.8271, + "step": 4212 + }, + { + "epoch": 2.7198192382182054, + "grad_norm": 1.175197978926982, + "learning_rate": 0.00023686654488276547, + "loss": 2.0879, + "step": 4213 + }, + { + "epoch": 2.7204648160103293, + "grad_norm": 1.1659150206673965, + "learning_rate": 0.0002368138665315444, + "loss": 2.2422, + "step": 4214 + }, + { + "epoch": 2.7211103938024532, + "grad_norm": 1.1972094110891087, + "learning_rate": 0.00023676117207544977, + "loss": 1.916, + "step": 4215 + }, + { + "epoch": 2.721755971594577, + "grad_norm": 1.1877829397372188, + "learning_rate": 0.00023670846152425688, + "loss": 2.1396, + "step": 4216 + }, + { + "epoch": 2.722401549386701, + "grad_norm": 1.1865620063656872, + "learning_rate": 0.00023665573488774422, + "loss": 1.9434, + "step": 4217 + }, + { + "epoch": 2.723047127178825, + "grad_norm": 1.0677010083023604, + "learning_rate": 0.00023660299217569299, + "loss": 2.0455, + "step": 4218 + }, + { + "epoch": 2.723692704970949, + "grad_norm": 1.0923876320132355, + "learning_rate": 0.00023655023339788755, + "loss": 1.601, + "step": 4219 + }, + { + "epoch": 2.724338282763073, + "grad_norm": 1.1822934518856605, + "learning_rate": 0.00023649745856411524, + "loss": 2.1573, + "step": 4220 + }, + { + "epoch": 2.724983860555197, + "grad_norm": 
1.27776102044224, + "learning_rate": 0.0002364446676841663, + "loss": 2.0694, + "step": 4221 + }, + { + "epoch": 2.7256294383473207, + "grad_norm": 1.2629634019514535, + "learning_rate": 0.000236391860767834, + "loss": 1.9342, + "step": 4222 + }, + { + "epoch": 2.7262750161394447, + "grad_norm": 1.230658850914159, + "learning_rate": 0.0002363390378249146, + "loss": 1.903, + "step": 4223 + }, + { + "epoch": 2.7269205939315686, + "grad_norm": 1.1797729157251569, + "learning_rate": 0.0002362861988652072, + "loss": 2.1838, + "step": 4224 + }, + { + "epoch": 2.7275661717236925, + "grad_norm": 1.346687302058428, + "learning_rate": 0.0002362333438985141, + "loss": 2.0853, + "step": 4225 + }, + { + "epoch": 2.7282117495158165, + "grad_norm": 1.1729763281043415, + "learning_rate": 0.0002361804729346404, + "loss": 2.0154, + "step": 4226 + }, + { + "epoch": 2.7288573273079404, + "grad_norm": 1.225592832467952, + "learning_rate": 0.0002361275859833942, + "loss": 1.8807, + "step": 4227 + }, + { + "epoch": 2.7295029051000643, + "grad_norm": 1.2746577074496404, + "learning_rate": 0.00023607468305458662, + "loss": 2.0439, + "step": 4228 + }, + { + "epoch": 2.7301484828921883, + "grad_norm": 1.2444429484869362, + "learning_rate": 0.00023602176415803163, + "loss": 1.9928, + "step": 4229 + }, + { + "epoch": 2.730794060684312, + "grad_norm": 1.0817826866067932, + "learning_rate": 0.00023596882930354625, + "loss": 1.6505, + "step": 4230 + }, + { + "epoch": 2.7314396384764366, + "grad_norm": 1.1394109845477667, + "learning_rate": 0.00023591587850095054, + "loss": 2.1786, + "step": 4231 + }, + { + "epoch": 2.7320852162685605, + "grad_norm": 1.3343616963471303, + "learning_rate": 0.00023586291176006725, + "loss": 2.3024, + "step": 4232 + }, + { + "epoch": 2.7327307940606844, + "grad_norm": 1.2796396310703109, + "learning_rate": 0.00023580992909072243, + "loss": 2.151, + "step": 4233 + }, + { + "epoch": 2.7333763718528084, + "grad_norm": 1.256180103820153, + "learning_rate": 0.00023575693050274487, + "loss": 1.9999, + "step": 4234 + }, + { + "epoch": 2.7340219496449323, + "grad_norm": 1.1914753807844785, + "learning_rate": 0.00023570391600596629, + "loss": 2.0212, + "step": 4235 + }, + { + "epoch": 2.734667527437056, + "grad_norm": 1.1988102656684396, + "learning_rate": 0.0002356508856102215, + "loss": 2.0403, + "step": 4236 + }, + { + "epoch": 2.73531310522918, + "grad_norm": 1.302229510264407, + "learning_rate": 0.00023559783932534814, + "loss": 1.9132, + "step": 4237 + }, + { + "epoch": 2.735958683021304, + "grad_norm": 1.0901273641613496, + "learning_rate": 0.0002355447771611869, + "loss": 1.7367, + "step": 4238 + }, + { + "epoch": 2.736604260813428, + "grad_norm": 1.1551830559667176, + "learning_rate": 0.00023549169912758132, + "loss": 2.0053, + "step": 4239 + }, + { + "epoch": 2.737249838605552, + "grad_norm": 1.1677515978207187, + "learning_rate": 0.00023543860523437797, + "loss": 2.0056, + "step": 4240 + }, + { + "epoch": 2.737895416397676, + "grad_norm": 1.1566260276905664, + "learning_rate": 0.00023538549549142622, + "loss": 1.8708, + "step": 4241 + }, + { + "epoch": 2.7385409941898, + "grad_norm": 1.1043144871605939, + "learning_rate": 0.0002353323699085786, + "loss": 1.8261, + "step": 4242 + }, + { + "epoch": 2.7391865719819237, + "grad_norm": 1.3262451113309055, + "learning_rate": 0.0002352792284956904, + "loss": 2.251, + "step": 4243 + }, + { + "epoch": 2.7398321497740477, + "grad_norm": 1.212980911684163, + "learning_rate": 0.0002352260712626199, + "loss": 1.8677, + "step": 4244 + }, + { + "epoch": 
2.7404777275661716, + "grad_norm": 1.2395742521346875, + "learning_rate": 0.00023517289821922834, + "loss": 2.2604, + "step": 4245 + }, + { + "epoch": 2.7411233053582955, + "grad_norm": 1.1716270298076414, + "learning_rate": 0.00023511970937537983, + "loss": 2.0411, + "step": 4246 + }, + { + "epoch": 2.74176888315042, + "grad_norm": 1.140698987565521, + "learning_rate": 0.00023506650474094156, + "loss": 1.9581, + "step": 4247 + }, + { + "epoch": 2.742414460942544, + "grad_norm": 1.1920190458497546, + "learning_rate": 0.00023501328432578338, + "loss": 2.0217, + "step": 4248 + }, + { + "epoch": 2.7430600387346677, + "grad_norm": 1.2755840722903566, + "learning_rate": 0.0002349600481397783, + "loss": 2.306, + "step": 4249 + }, + { + "epoch": 2.7437056165267917, + "grad_norm": 1.2120045343152277, + "learning_rate": 0.00023490679619280228, + "loss": 2.3261, + "step": 4250 + }, + { + "epoch": 2.7443511943189156, + "grad_norm": 1.152831887978163, + "learning_rate": 0.000234853528494734, + "loss": 2.0682, + "step": 4251 + }, + { + "epoch": 2.7449967721110395, + "grad_norm": 1.1507700983987632, + "learning_rate": 0.00023480024505545516, + "loss": 1.9301, + "step": 4252 + }, + { + "epoch": 2.7456423499031635, + "grad_norm": 1.0919747905898498, + "learning_rate": 0.00023474694588485046, + "loss": 1.8949, + "step": 4253 + }, + { + "epoch": 2.7462879276952874, + "grad_norm": 1.159197564580571, + "learning_rate": 0.0002346936309928074, + "loss": 1.8206, + "step": 4254 + }, + { + "epoch": 2.7469335054874113, + "grad_norm": 1.2223659986467597, + "learning_rate": 0.00023464030038921647, + "loss": 2.0621, + "step": 4255 + }, + { + "epoch": 2.7475790832795353, + "grad_norm": 1.2327110501341656, + "learning_rate": 0.00023458695408397107, + "loss": 2.1933, + "step": 4256 + }, + { + "epoch": 2.748224661071659, + "grad_norm": 1.3836745456928758, + "learning_rate": 0.0002345335920869674, + "loss": 2.2842, + "step": 4257 + }, + { + "epoch": 2.748870238863783, + "grad_norm": 1.192686124106353, + "learning_rate": 0.00023448021440810481, + "loss": 2.007, + "step": 4258 + }, + { + "epoch": 2.749515816655907, + "grad_norm": 1.1414717267778203, + "learning_rate": 0.00023442682105728535, + "loss": 2.0114, + "step": 4259 + }, + { + "epoch": 2.750161394448031, + "grad_norm": 1.1740206499234296, + "learning_rate": 0.00023437341204441396, + "loss": 2.0995, + "step": 4260 + }, + { + "epoch": 2.750806972240155, + "grad_norm": 1.2378415605626953, + "learning_rate": 0.00023431998737939865, + "loss": 2.1802, + "step": 4261 + }, + { + "epoch": 2.751452550032279, + "grad_norm": 1.2248171346209722, + "learning_rate": 0.00023426654707215029, + "loss": 2.0887, + "step": 4262 + }, + { + "epoch": 2.7520981278244028, + "grad_norm": 1.407955711658792, + "learning_rate": 0.0002342130911325825, + "loss": 2.371, + "step": 4263 + }, + { + "epoch": 2.7527437056165267, + "grad_norm": 1.1381709439787153, + "learning_rate": 0.00023415961957061197, + "loss": 1.8469, + "step": 4264 + }, + { + "epoch": 2.7533892834086506, + "grad_norm": 1.0935612311288485, + "learning_rate": 0.00023410613239615828, + "loss": 1.8635, + "step": 4265 + }, + { + "epoch": 2.7540348612007746, + "grad_norm": 1.3547488495782953, + "learning_rate": 0.00023405262961914374, + "loss": 2.0018, + "step": 4266 + }, + { + "epoch": 2.7546804389928985, + "grad_norm": 1.094007378823429, + "learning_rate": 0.0002339991112494938, + "loss": 2.1127, + "step": 4267 + }, + { + "epoch": 2.7553260167850224, + "grad_norm": 1.2594489156871709, + "learning_rate": 0.00023394557729713652, + 
"loss": 2.1429, + "step": 4268 + }, + { + "epoch": 2.7559715945771464, + "grad_norm": 1.2137170627736007, + "learning_rate": 0.00023389202777200318, + "loss": 1.9601, + "step": 4269 + }, + { + "epoch": 2.7566171723692703, + "grad_norm": 1.2054654911827936, + "learning_rate": 0.00023383846268402764, + "loss": 2.0819, + "step": 4270 + }, + { + "epoch": 2.757262750161394, + "grad_norm": 1.1045285320978504, + "learning_rate": 0.00023378488204314684, + "loss": 2.059, + "step": 4271 + }, + { + "epoch": 2.757908327953518, + "grad_norm": 1.1667883383171307, + "learning_rate": 0.00023373128585930052, + "loss": 2.2274, + "step": 4272 + }, + { + "epoch": 2.758553905745642, + "grad_norm": 1.1093943310024414, + "learning_rate": 0.00023367767414243134, + "loss": 1.8945, + "step": 4273 + }, + { + "epoch": 2.7591994835377665, + "grad_norm": 1.0582117856725262, + "learning_rate": 0.00023362404690248486, + "loss": 2.0506, + "step": 4274 + }, + { + "epoch": 2.7598450613298904, + "grad_norm": 1.2480741261756605, + "learning_rate": 0.00023357040414940944, + "loss": 1.8999, + "step": 4275 + }, + { + "epoch": 2.7604906391220143, + "grad_norm": 1.2232517989094018, + "learning_rate": 0.0002335167458931564, + "loss": 2.1781, + "step": 4276 + }, + { + "epoch": 2.7611362169141382, + "grad_norm": 1.0493917059065023, + "learning_rate": 0.00023346307214367983, + "loss": 1.814, + "step": 4277 + }, + { + "epoch": 2.761781794706262, + "grad_norm": 1.2594481482440507, + "learning_rate": 0.00023340938291093686, + "loss": 2.1451, + "step": 4278 + }, + { + "epoch": 2.762427372498386, + "grad_norm": 1.2148312472794354, + "learning_rate": 0.0002333556782048874, + "loss": 2.171, + "step": 4279 + }, + { + "epoch": 2.76307295029051, + "grad_norm": 1.2001857758080128, + "learning_rate": 0.00023330195803549414, + "loss": 2.208, + "step": 4280 + }, + { + "epoch": 2.763718528082634, + "grad_norm": 1.2236040530368515, + "learning_rate": 0.00023324822241272284, + "loss": 2.1068, + "step": 4281 + }, + { + "epoch": 2.764364105874758, + "grad_norm": 1.119423660305184, + "learning_rate": 0.00023319447134654195, + "loss": 1.8541, + "step": 4282 + }, + { + "epoch": 2.765009683666882, + "grad_norm": 1.1093096552393422, + "learning_rate": 0.00023314070484692283, + "loss": 1.9334, + "step": 4283 + }, + { + "epoch": 2.7656552614590058, + "grad_norm": 1.0961855440861363, + "learning_rate": 0.0002330869229238398, + "loss": 1.8628, + "step": 4284 + }, + { + "epoch": 2.7663008392511297, + "grad_norm": 1.228413989607419, + "learning_rate": 0.0002330331255872699, + "loss": 2.1279, + "step": 4285 + }, + { + "epoch": 2.7669464170432536, + "grad_norm": 1.1545456854274743, + "learning_rate": 0.00023297931284719313, + "loss": 1.9502, + "step": 4286 + }, + { + "epoch": 2.7675919948353775, + "grad_norm": 1.1275763822472438, + "learning_rate": 0.00023292548471359233, + "loss": 1.6753, + "step": 4287 + }, + { + "epoch": 2.7682375726275015, + "grad_norm": 1.0579779399419238, + "learning_rate": 0.0002328716411964531, + "loss": 1.7848, + "step": 4288 + }, + { + "epoch": 2.7688831504196254, + "grad_norm": 1.151465953410706, + "learning_rate": 0.00023281778230576408, + "loss": 2.0816, + "step": 4289 + }, + { + "epoch": 2.76952872821175, + "grad_norm": 1.1750259438119943, + "learning_rate": 0.00023276390805151653, + "loss": 2.0596, + "step": 4290 + }, + { + "epoch": 2.7701743060038737, + "grad_norm": 1.1196476682833596, + "learning_rate": 0.00023271001844370475, + "loss": 2.0467, + "step": 4291 + }, + { + "epoch": 2.7708198837959976, + "grad_norm": 1.1798530095599284, 
+ "learning_rate": 0.00023265611349232584, + "loss": 2.0508, + "step": 4292 + }, + { + "epoch": 2.7714654615881216, + "grad_norm": 1.3970206938791605, + "learning_rate": 0.00023260219320737972, + "loss": 2.1087, + "step": 4293 + }, + { + "epoch": 2.7721110393802455, + "grad_norm": 1.102152939122843, + "learning_rate": 0.0002325482575988691, + "loss": 1.952, + "step": 4294 + }, + { + "epoch": 2.7727566171723694, + "grad_norm": 1.3817730513521813, + "learning_rate": 0.00023249430667679968, + "loss": 2.2966, + "step": 4295 + }, + { + "epoch": 2.7734021949644934, + "grad_norm": 1.2637498413790853, + "learning_rate": 0.00023244034045117988, + "loss": 2.0628, + "step": 4296 + }, + { + "epoch": 2.7740477727566173, + "grad_norm": 1.2963931794248167, + "learning_rate": 0.00023238635893202097, + "loss": 2.1986, + "step": 4297 + }, + { + "epoch": 2.774693350548741, + "grad_norm": 1.319599094628909, + "learning_rate": 0.00023233236212933718, + "loss": 2.4222, + "step": 4298 + }, + { + "epoch": 2.775338928340865, + "grad_norm": 1.0963378271839608, + "learning_rate": 0.0002322783500531453, + "loss": 1.9371, + "step": 4299 + }, + { + "epoch": 2.775984506132989, + "grad_norm": 1.0369023068466592, + "learning_rate": 0.0002322243227134653, + "loss": 1.7765, + "step": 4300 + }, + { + "epoch": 2.776630083925113, + "grad_norm": 1.0937027196387192, + "learning_rate": 0.00023217028012031973, + "loss": 1.951, + "step": 4301 + }, + { + "epoch": 2.777275661717237, + "grad_norm": 1.2152058514533206, + "learning_rate": 0.00023211622228373405, + "loss": 2.0539, + "step": 4302 + }, + { + "epoch": 2.777921239509361, + "grad_norm": 1.2726764369260601, + "learning_rate": 0.00023206214921373664, + "loss": 2.2086, + "step": 4303 + }, + { + "epoch": 2.778566817301485, + "grad_norm": 1.215374754827723, + "learning_rate": 0.00023200806092035848, + "loss": 1.7451, + "step": 4304 + }, + { + "epoch": 2.7792123950936087, + "grad_norm": 1.2197667842732896, + "learning_rate": 0.0002319539574136336, + "loss": 2.2729, + "step": 4305 + }, + { + "epoch": 2.7798579728857327, + "grad_norm": 1.0653858418605375, + "learning_rate": 0.00023189983870359874, + "loss": 1.8626, + "step": 4306 + }, + { + "epoch": 2.7805035506778566, + "grad_norm": 1.113046209720958, + "learning_rate": 0.00023184570480029346, + "loss": 1.5652, + "step": 4307 + }, + { + "epoch": 2.7811491284699805, + "grad_norm": 1.1993305806743997, + "learning_rate": 0.0002317915557137602, + "loss": 1.9488, + "step": 4308 + }, + { + "epoch": 2.7817947062621045, + "grad_norm": 0.9873247511225917, + "learning_rate": 0.0002317373914540442, + "loss": 1.5931, + "step": 4309 + }, + { + "epoch": 2.7824402840542284, + "grad_norm": 1.1356704539026377, + "learning_rate": 0.0002316832120311934, + "loss": 2.1466, + "step": 4310 + }, + { + "epoch": 2.7830858618463523, + "grad_norm": 1.1457300370855483, + "learning_rate": 0.0002316290174552587, + "loss": 1.9665, + "step": 4311 + }, + { + "epoch": 2.7837314396384762, + "grad_norm": 1.1821697211135538, + "learning_rate": 0.0002315748077362938, + "loss": 1.9951, + "step": 4312 + }, + { + "epoch": 2.7843770174306, + "grad_norm": 1.2801514380407493, + "learning_rate": 0.00023152058288435505, + "loss": 2.0465, + "step": 4313 + }, + { + "epoch": 2.785022595222724, + "grad_norm": 1.2530841550991374, + "learning_rate": 0.00023146634290950184, + "loss": 2.1139, + "step": 4314 + }, + { + "epoch": 2.785668173014848, + "grad_norm": 1.2378164713143562, + "learning_rate": 0.00023141208782179618, + "loss": 1.756, + "step": 4315 + }, + { + "epoch": 
2.786313750806972, + "grad_norm": 1.1531995644433073, + "learning_rate": 0.00023135781763130298, + "loss": 2.076, + "step": 4316 + }, + { + "epoch": 2.7869593285990963, + "grad_norm": 1.2097641605193883, + "learning_rate": 0.00023130353234808996, + "loss": 2.0715, + "step": 4317 + }, + { + "epoch": 2.7876049063912203, + "grad_norm": 1.1809269921217325, + "learning_rate": 0.0002312492319822275, + "loss": 2.362, + "step": 4318 + }, + { + "epoch": 2.788250484183344, + "grad_norm": 1.2010231342316353, + "learning_rate": 0.000231194916543789, + "loss": 2.1042, + "step": 4319 + }, + { + "epoch": 2.788896061975468, + "grad_norm": 1.1519790020862124, + "learning_rate": 0.0002311405860428504, + "loss": 2.1265, + "step": 4320 + }, + { + "epoch": 2.789541639767592, + "grad_norm": 1.1717518577877435, + "learning_rate": 0.0002310862404894907, + "loss": 1.9694, + "step": 4321 + }, + { + "epoch": 2.790187217559716, + "grad_norm": 1.1513437940831408, + "learning_rate": 0.00023103187989379154, + "loss": 2.098, + "step": 4322 + }, + { + "epoch": 2.79083279535184, + "grad_norm": 1.1417115056301024, + "learning_rate": 0.00023097750426583733, + "loss": 2.0151, + "step": 4323 + }, + { + "epoch": 2.791478373143964, + "grad_norm": 1.0643352254296912, + "learning_rate": 0.00023092311361571532, + "loss": 1.7591, + "step": 4324 + }, + { + "epoch": 2.792123950936088, + "grad_norm": 1.1287082731054212, + "learning_rate": 0.0002308687079535156, + "loss": 1.9365, + "step": 4325 + }, + { + "epoch": 2.7927695287282117, + "grad_norm": 1.2784979235860614, + "learning_rate": 0.0002308142872893309, + "loss": 2.1225, + "step": 4326 + }, + { + "epoch": 2.7934151065203356, + "grad_norm": 1.103536843330155, + "learning_rate": 0.00023075985163325687, + "loss": 2.1148, + "step": 4327 + }, + { + "epoch": 2.7940606843124596, + "grad_norm": 1.1701846961818474, + "learning_rate": 0.0002307054009953919, + "loss": 2.2124, + "step": 4328 + }, + { + "epoch": 2.7947062621045835, + "grad_norm": 1.0951401590144456, + "learning_rate": 0.00023065093538583711, + "loss": 1.5852, + "step": 4329 + }, + { + "epoch": 2.7953518398967074, + "grad_norm": 1.302488966984141, + "learning_rate": 0.00023059645481469644, + "loss": 2.3046, + "step": 4330 + }, + { + "epoch": 2.7959974176888314, + "grad_norm": 1.3150799616986195, + "learning_rate": 0.00023054195929207662, + "loss": 2.2001, + "step": 4331 + }, + { + "epoch": 2.7966429954809553, + "grad_norm": 1.1634173737343727, + "learning_rate": 0.00023048744882808711, + "loss": 2.149, + "step": 4332 + }, + { + "epoch": 2.7972885732730797, + "grad_norm": 1.2313507625895923, + "learning_rate": 0.0002304329234328402, + "loss": 2.2067, + "step": 4333 + }, + { + "epoch": 2.7979341510652036, + "grad_norm": 1.2568925438692924, + "learning_rate": 0.00023037838311645094, + "loss": 2.2758, + "step": 4334 + }, + { + "epoch": 2.7985797288573275, + "grad_norm": 1.1386752392865114, + "learning_rate": 0.00023032382788903703, + "loss": 1.8246, + "step": 4335 + }, + { + "epoch": 2.7992253066494515, + "grad_norm": 1.3024954611669854, + "learning_rate": 0.00023026925776071917, + "loss": 2.2288, + "step": 4336 + }, + { + "epoch": 2.7998708844415754, + "grad_norm": 1.2101863696563988, + "learning_rate": 0.00023021467274162056, + "loss": 1.819, + "step": 4337 + }, + { + "epoch": 2.8005164622336993, + "grad_norm": 1.2423117356785778, + "learning_rate": 0.0002301600728418673, + "loss": 2.1411, + "step": 4338 + }, + { + "epoch": 2.8011620400258233, + "grad_norm": 1.0781584683797665, + "learning_rate": 0.00023010545807158834, + "loss": 
1.7036, + "step": 4339 + }, + { + "epoch": 2.801807617817947, + "grad_norm": 1.082218560085497, + "learning_rate": 0.0002300508284409152, + "loss": 1.7721, + "step": 4340 + }, + { + "epoch": 2.802453195610071, + "grad_norm": 1.1610606814691646, + "learning_rate": 0.00022999618395998223, + "loss": 1.7105, + "step": 4341 + }, + { + "epoch": 2.803098773402195, + "grad_norm": 1.2848679293306742, + "learning_rate": 0.00022994152463892662, + "loss": 2.1063, + "step": 4342 + }, + { + "epoch": 2.803744351194319, + "grad_norm": 1.486228530061675, + "learning_rate": 0.00022988685048788812, + "loss": 2.298, + "step": 4343 + }, + { + "epoch": 2.804389928986443, + "grad_norm": 1.3335430993978163, + "learning_rate": 0.00022983216151700953, + "loss": 2.1934, + "step": 4344 + }, + { + "epoch": 2.805035506778567, + "grad_norm": 1.1622535477126263, + "learning_rate": 0.0002297774577364361, + "loss": 1.9743, + "step": 4345 + }, + { + "epoch": 2.8056810845706908, + "grad_norm": 1.138110664333729, + "learning_rate": 0.00022972273915631595, + "loss": 1.9715, + "step": 4346 + }, + { + "epoch": 2.8063266623628147, + "grad_norm": 1.1715795019710002, + "learning_rate": 0.00022966800578680003, + "loss": 2.1349, + "step": 4347 + }, + { + "epoch": 2.8069722401549386, + "grad_norm": 1.2654508218891867, + "learning_rate": 0.00022961325763804183, + "loss": 2.2047, + "step": 4348 + }, + { + "epoch": 2.8076178179470626, + "grad_norm": 1.1704235769374203, + "learning_rate": 0.00022955849472019774, + "loss": 2.1973, + "step": 4349 + }, + { + "epoch": 2.8082633957391865, + "grad_norm": 1.3560839380596126, + "learning_rate": 0.00022950371704342693, + "loss": 2.3023, + "step": 4350 + }, + { + "epoch": 2.8089089735313104, + "grad_norm": 1.3204832062355563, + "learning_rate": 0.00022944892461789117, + "loss": 2.0815, + "step": 4351 + }, + { + "epoch": 2.8095545513234343, + "grad_norm": 1.0689472485252896, + "learning_rate": 0.00022939411745375498, + "loss": 2.0445, + "step": 4352 + }, + { + "epoch": 2.8102001291155583, + "grad_norm": 1.0676868159108759, + "learning_rate": 0.0002293392955611857, + "loss": 1.8445, + "step": 4353 + }, + { + "epoch": 2.810845706907682, + "grad_norm": 1.092611443444701, + "learning_rate": 0.0002292844589503534, + "loss": 1.6553, + "step": 4354 + }, + { + "epoch": 2.811491284699806, + "grad_norm": 1.1756161176421607, + "learning_rate": 0.00022922960763143075, + "loss": 2.0269, + "step": 4355 + }, + { + "epoch": 2.81213686249193, + "grad_norm": 1.2856695703629213, + "learning_rate": 0.0002291747416145933, + "loss": 2.2678, + "step": 4356 + }, + { + "epoch": 2.812782440284054, + "grad_norm": 1.2708659919239162, + "learning_rate": 0.00022911986091001926, + "loss": 1.9641, + "step": 4357 + }, + { + "epoch": 2.813428018076178, + "grad_norm": 1.1624979211422666, + "learning_rate": 0.00022906496552788956, + "loss": 1.9133, + "step": 4358 + }, + { + "epoch": 2.814073595868302, + "grad_norm": 1.3553266159935895, + "learning_rate": 0.0002290100554783879, + "loss": 2.3671, + "step": 4359 + }, + { + "epoch": 2.8147191736604262, + "grad_norm": 1.2764874238393593, + "learning_rate": 0.00022895513077170058, + "loss": 1.8305, + "step": 4360 + }, + { + "epoch": 2.81536475145255, + "grad_norm": 1.222910972284332, + "learning_rate": 0.00022890019141801678, + "loss": 2.2114, + "step": 4361 + }, + { + "epoch": 2.816010329244674, + "grad_norm": 1.3204913894980577, + "learning_rate": 0.00022884523742752833, + "loss": 2.0665, + "step": 4362 + }, + { + "epoch": 2.816655907036798, + "grad_norm": 1.3204923357717224, + 
"learning_rate": 0.00022879026881042968, + "loss": 2.2003, + "step": 4363 + }, + { + "epoch": 2.817301484828922, + "grad_norm": 1.2741606103513496, + "learning_rate": 0.0002287352855769182, + "loss": 2.1819, + "step": 4364 + }, + { + "epoch": 2.817947062621046, + "grad_norm": 1.2006809438587733, + "learning_rate": 0.0002286802877371937, + "loss": 1.7038, + "step": 4365 + }, + { + "epoch": 2.81859264041317, + "grad_norm": 1.1591649310317276, + "learning_rate": 0.00022862527530145897, + "loss": 2.0319, + "step": 4366 + }, + { + "epoch": 2.8192382182052937, + "grad_norm": 1.3197069148419365, + "learning_rate": 0.00022857024827991934, + "loss": 2.1632, + "step": 4367 + }, + { + "epoch": 2.8198837959974177, + "grad_norm": 1.1526491514416548, + "learning_rate": 0.0002285152066827829, + "loss": 1.9014, + "step": 4368 + }, + { + "epoch": 2.8205293737895416, + "grad_norm": 1.2941420793492344, + "learning_rate": 0.00022846015052026045, + "loss": 2.1313, + "step": 4369 + }, + { + "epoch": 2.8211749515816655, + "grad_norm": 1.1159093828739661, + "learning_rate": 0.00022840507980256548, + "loss": 1.9588, + "step": 4370 + }, + { + "epoch": 2.8218205293737895, + "grad_norm": 1.2450332647703826, + "learning_rate": 0.0002283499945399142, + "loss": 2.1236, + "step": 4371 + }, + { + "epoch": 2.8224661071659134, + "grad_norm": 1.1433384933726924, + "learning_rate": 0.00022829489474252544, + "loss": 2.148, + "step": 4372 + }, + { + "epoch": 2.8231116849580373, + "grad_norm": 1.2622995519685563, + "learning_rate": 0.00022823978042062082, + "loss": 2.0456, + "step": 4373 + }, + { + "epoch": 2.8237572627501613, + "grad_norm": 1.161414161164808, + "learning_rate": 0.00022818465158442453, + "loss": 1.6349, + "step": 4374 + }, + { + "epoch": 2.824402840542285, + "grad_norm": 1.1609785780571493, + "learning_rate": 0.00022812950824416375, + "loss": 2.0476, + "step": 4375 + }, + { + "epoch": 2.8250484183344096, + "grad_norm": 1.078254567080054, + "learning_rate": 0.00022807435041006797, + "loss": 1.6303, + "step": 4376 + }, + { + "epoch": 2.8256939961265335, + "grad_norm": 1.2024760143004352, + "learning_rate": 0.00022801917809236956, + "loss": 2.0311, + "step": 4377 + }, + { + "epoch": 2.8263395739186574, + "grad_norm": 1.167018594519023, + "learning_rate": 0.00022796399130130363, + "loss": 2.1493, + "step": 4378 + }, + { + "epoch": 2.8269851517107814, + "grad_norm": 1.2211567578313303, + "learning_rate": 0.00022790879004710784, + "loss": 2.0223, + "step": 4379 + }, + { + "epoch": 2.8276307295029053, + "grad_norm": 1.3626225829422067, + "learning_rate": 0.0002278535743400226, + "loss": 2.1242, + "step": 4380 + }, + { + "epoch": 2.828276307295029, + "grad_norm": 1.3143837126396922, + "learning_rate": 0.000227798344190291, + "loss": 2.0206, + "step": 4381 + }, + { + "epoch": 2.828921885087153, + "grad_norm": 1.1219163399893535, + "learning_rate": 0.00022774309960815886, + "loss": 1.9902, + "step": 4382 + }, + { + "epoch": 2.829567462879277, + "grad_norm": 1.1408082568976066, + "learning_rate": 0.00022768784060387458, + "loss": 2.1547, + "step": 4383 + }, + { + "epoch": 2.830213040671401, + "grad_norm": 1.1148705841967284, + "learning_rate": 0.00022763256718768925, + "loss": 2.0918, + "step": 4384 + }, + { + "epoch": 2.830858618463525, + "grad_norm": 1.1759139251830943, + "learning_rate": 0.00022757727936985667, + "loss": 2.1352, + "step": 4385 + }, + { + "epoch": 2.831504196255649, + "grad_norm": 1.215359174960207, + "learning_rate": 0.0002275219771606334, + "loss": 2.1883, + "step": 4386 + }, + { + "epoch": 
2.832149774047773, + "grad_norm": 1.2549331882288342, + "learning_rate": 0.00022746666057027842, + "loss": 2.1857, + "step": 4387 + }, + { + "epoch": 2.8327953518398967, + "grad_norm": 1.1113591723786824, + "learning_rate": 0.00022741132960905363, + "loss": 2.0604, + "step": 4388 + }, + { + "epoch": 2.8334409296320207, + "grad_norm": 1.1266793251931897, + "learning_rate": 0.00022735598428722347, + "loss": 2.0849, + "step": 4389 + }, + { + "epoch": 2.8340865074241446, + "grad_norm": 1.2480732348357286, + "learning_rate": 0.00022730062461505518, + "loss": 1.8967, + "step": 4390 + }, + { + "epoch": 2.8347320852162685, + "grad_norm": 1.0195724874801544, + "learning_rate": 0.00022724525060281836, + "loss": 1.6779, + "step": 4391 + }, + { + "epoch": 2.8353776630083924, + "grad_norm": 1.3007883748931648, + "learning_rate": 0.00022718986226078557, + "loss": 2.2654, + "step": 4392 + }, + { + "epoch": 2.8360232408005164, + "grad_norm": 1.1123723225434072, + "learning_rate": 0.00022713445959923195, + "loss": 1.9028, + "step": 4393 + }, + { + "epoch": 2.8366688185926403, + "grad_norm": 1.2891176603671537, + "learning_rate": 0.0002270790426284352, + "loss": 2.1917, + "step": 4394 + }, + { + "epoch": 2.8373143963847642, + "grad_norm": 1.1724370903915684, + "learning_rate": 0.00022702361135867579, + "loss": 2.0219, + "step": 4395 + }, + { + "epoch": 2.837959974176888, + "grad_norm": 1.2613757962198684, + "learning_rate": 0.00022696816580023677, + "loss": 2.0628, + "step": 4396 + }, + { + "epoch": 2.838605551969012, + "grad_norm": 1.2809685136650946, + "learning_rate": 0.00022691270596340388, + "loss": 2.2177, + "step": 4397 + }, + { + "epoch": 2.839251129761136, + "grad_norm": 1.3948026849905997, + "learning_rate": 0.00022685723185846552, + "loss": 2.2352, + "step": 4398 + }, + { + "epoch": 2.83989670755326, + "grad_norm": 1.1547431118604137, + "learning_rate": 0.00022680174349571261, + "loss": 1.8148, + "step": 4399 + }, + { + "epoch": 2.840542285345384, + "grad_norm": 1.1998857980443158, + "learning_rate": 0.00022674624088543898, + "loss": 2.0555, + "step": 4400 + }, + { + "epoch": 2.841187863137508, + "grad_norm": 1.2497565493155502, + "learning_rate": 0.00022669072403794084, + "loss": 2.1013, + "step": 4401 + }, + { + "epoch": 2.8418334409296317, + "grad_norm": 1.0549327468641718, + "learning_rate": 0.0002266351929635171, + "loss": 1.711, + "step": 4402 + }, + { + "epoch": 2.842479018721756, + "grad_norm": 1.152119634283262, + "learning_rate": 0.00022657964767246945, + "loss": 2.0223, + "step": 4403 + }, + { + "epoch": 2.84312459651388, + "grad_norm": 1.1868184852677475, + "learning_rate": 0.00022652408817510207, + "loss": 2.0718, + "step": 4404 + }, + { + "epoch": 2.843770174306004, + "grad_norm": 1.3292075210800898, + "learning_rate": 0.0002264685144817218, + "loss": 2.1325, + "step": 4405 + }, + { + "epoch": 2.844415752098128, + "grad_norm": 1.3558086092543742, + "learning_rate": 0.00022641292660263818, + "loss": 2.2451, + "step": 4406 + }, + { + "epoch": 2.845061329890252, + "grad_norm": 1.2895643120056606, + "learning_rate": 0.00022635732454816336, + "loss": 2.0229, + "step": 4407 + }, + { + "epoch": 2.8457069076823758, + "grad_norm": 1.2233362835476587, + "learning_rate": 0.000226301708328612, + "loss": 2.0236, + "step": 4408 + }, + { + "epoch": 2.8463524854744997, + "grad_norm": 1.0928079197538898, + "learning_rate": 0.00022624607795430156, + "loss": 1.9012, + "step": 4409 + }, + { + "epoch": 2.8469980632666236, + "grad_norm": 1.1766402272738934, + "learning_rate": 0.00022619043343555204, + 
"loss": 2.1175, + "step": 4410 + }, + { + "epoch": 2.8476436410587476, + "grad_norm": 1.2341498364750167, + "learning_rate": 0.00022613477478268616, + "loss": 2.0624, + "step": 4411 + }, + { + "epoch": 2.8482892188508715, + "grad_norm": 1.25890559967813, + "learning_rate": 0.00022607910200602902, + "loss": 2.1088, + "step": 4412 + }, + { + "epoch": 2.8489347966429954, + "grad_norm": 1.2439253064023792, + "learning_rate": 0.00022602341511590853, + "loss": 2.1447, + "step": 4413 + }, + { + "epoch": 2.8495803744351194, + "grad_norm": 1.0952856581275934, + "learning_rate": 0.00022596771412265528, + "loss": 1.9154, + "step": 4414 + }, + { + "epoch": 2.8502259522272433, + "grad_norm": 1.233916537462777, + "learning_rate": 0.00022591199903660236, + "loss": 2.2774, + "step": 4415 + }, + { + "epoch": 2.850871530019367, + "grad_norm": 1.2422610180660978, + "learning_rate": 0.0002258562698680854, + "loss": 2.1804, + "step": 4416 + }, + { + "epoch": 2.851517107811491, + "grad_norm": 1.229100671582723, + "learning_rate": 0.00022580052662744287, + "loss": 2.0589, + "step": 4417 + }, + { + "epoch": 2.852162685603615, + "grad_norm": 1.1886311281970217, + "learning_rate": 0.0002257447693250156, + "loss": 2.0615, + "step": 4418 + }, + { + "epoch": 2.8528082633957395, + "grad_norm": 1.1561921493153788, + "learning_rate": 0.00022568899797114727, + "loss": 2.2244, + "step": 4419 + }, + { + "epoch": 2.8534538411878634, + "grad_norm": 1.2018540749032804, + "learning_rate": 0.0002256332125761839, + "loss": 2.1998, + "step": 4420 + }, + { + "epoch": 2.8540994189799873, + "grad_norm": 1.2610284779732315, + "learning_rate": 0.00022557741315047442, + "loss": 2.4117, + "step": 4421 + }, + { + "epoch": 2.8547449967721112, + "grad_norm": 1.3410471651937024, + "learning_rate": 0.0002255215997043701, + "loss": 2.2913, + "step": 4422 + }, + { + "epoch": 2.855390574564235, + "grad_norm": 1.116029826762406, + "learning_rate": 0.00022546577224822492, + "loss": 2.0597, + "step": 4423 + }, + { + "epoch": 2.856036152356359, + "grad_norm": 1.2544270935749957, + "learning_rate": 0.00022540993079239548, + "loss": 1.9126, + "step": 4424 + }, + { + "epoch": 2.856681730148483, + "grad_norm": 1.1446803140675323, + "learning_rate": 0.00022535407534724094, + "loss": 2.0418, + "step": 4425 + }, + { + "epoch": 2.857327307940607, + "grad_norm": 1.136535510093738, + "learning_rate": 0.0002252982059231231, + "loss": 1.9531, + "step": 4426 + }, + { + "epoch": 2.857972885732731, + "grad_norm": 1.0293409703498317, + "learning_rate": 0.00022524232253040623, + "loss": 1.5807, + "step": 4427 + }, + { + "epoch": 2.858618463524855, + "grad_norm": 1.2827948467860602, + "learning_rate": 0.0002251864251794573, + "loss": 2.0204, + "step": 4428 + }, + { + "epoch": 2.8592640413169788, + "grad_norm": 1.3571462045700076, + "learning_rate": 0.00022513051388064597, + "loss": 2.1673, + "step": 4429 + }, + { + "epoch": 2.8599096191091027, + "grad_norm": 1.1382792725132687, + "learning_rate": 0.0002250745886443442, + "loss": 1.9295, + "step": 4430 + }, + { + "epoch": 2.8605551969012266, + "grad_norm": 1.1876236933113322, + "learning_rate": 0.00022501864948092684, + "loss": 1.5936, + "step": 4431 + }, + { + "epoch": 2.8612007746933505, + "grad_norm": 1.1804469715144352, + "learning_rate": 0.0002249626964007711, + "loss": 1.891, + "step": 4432 + }, + { + "epoch": 2.8618463524854745, + "grad_norm": 1.1524505427026792, + "learning_rate": 0.0002249067294142569, + "loss": 1.9415, + "step": 4433 + }, + { + "epoch": 2.8624919302775984, + "grad_norm": 1.2927383258335556, 
+ "learning_rate": 0.00022485074853176667, + "loss": 2.227, + "step": 4434 + }, + { + "epoch": 2.8631375080697223, + "grad_norm": 1.1907695107806622, + "learning_rate": 0.0002247947537636854, + "loss": 1.9386, + "step": 4435 + }, + { + "epoch": 2.8637830858618463, + "grad_norm": 1.2204449503234374, + "learning_rate": 0.00022473874512040083, + "loss": 1.9158, + "step": 4436 + }, + { + "epoch": 2.86442866365397, + "grad_norm": 1.2819423202231992, + "learning_rate": 0.00022468272261230306, + "loss": 2.0385, + "step": 4437 + }, + { + "epoch": 2.865074241446094, + "grad_norm": 1.195057053973632, + "learning_rate": 0.00022462668624978482, + "loss": 2.0087, + "step": 4438 + }, + { + "epoch": 2.865719819238218, + "grad_norm": 1.2894056195777832, + "learning_rate": 0.0002245706360432415, + "loss": 2.3376, + "step": 4439 + }, + { + "epoch": 2.866365397030342, + "grad_norm": 1.1561193564330543, + "learning_rate": 0.000224514572003071, + "loss": 2.0206, + "step": 4440 + }, + { + "epoch": 2.867010974822466, + "grad_norm": 1.1024339018273812, + "learning_rate": 0.00022445849413967368, + "loss": 1.9054, + "step": 4441 + }, + { + "epoch": 2.86765655261459, + "grad_norm": 1.1044310800121206, + "learning_rate": 0.00022440240246345268, + "loss": 1.9977, + "step": 4442 + }, + { + "epoch": 2.868302130406714, + "grad_norm": 1.2649968938405174, + "learning_rate": 0.00022434629698481356, + "loss": 2.2609, + "step": 4443 + }, + { + "epoch": 2.8689477081988377, + "grad_norm": 1.17989781873606, + "learning_rate": 0.00022429017771416444, + "loss": 2.083, + "step": 4444 + }, + { + "epoch": 2.8695932859909616, + "grad_norm": 1.1114033245719661, + "learning_rate": 0.00022423404466191603, + "loss": 1.6968, + "step": 4445 + }, + { + "epoch": 2.870238863783086, + "grad_norm": 1.3957008206921062, + "learning_rate": 0.0002241778978384816, + "loss": 2.2168, + "step": 4446 + }, + { + "epoch": 2.87088444157521, + "grad_norm": 1.112434377615922, + "learning_rate": 0.000224121737254277, + "loss": 1.7101, + "step": 4447 + }, + { + "epoch": 2.871530019367334, + "grad_norm": 0.9883482548212721, + "learning_rate": 0.00022406556291972057, + "loss": 1.5738, + "step": 4448 + }, + { + "epoch": 2.872175597159458, + "grad_norm": 1.189449151887843, + "learning_rate": 0.0002240093748452332, + "loss": 1.9587, + "step": 4449 + }, + { + "epoch": 2.8728211749515817, + "grad_norm": 1.1986642732227732, + "learning_rate": 0.00022395317304123844, + "loss": 1.9583, + "step": 4450 + }, + { + "epoch": 2.8734667527437057, + "grad_norm": 1.2533459295914695, + "learning_rate": 0.00022389695751816227, + "loss": 2.0676, + "step": 4451 + }, + { + "epoch": 2.8741123305358296, + "grad_norm": 1.2780714558591408, + "learning_rate": 0.0002238407282864332, + "loss": 2.1027, + "step": 4452 + }, + { + "epoch": 2.8747579083279535, + "grad_norm": 1.2797627963743854, + "learning_rate": 0.00022378448535648245, + "loss": 2.2693, + "step": 4453 + }, + { + "epoch": 2.8754034861200775, + "grad_norm": 1.1125226941625308, + "learning_rate": 0.0002237282287387436, + "loss": 2.0189, + "step": 4454 + }, + { + "epoch": 2.8760490639122014, + "grad_norm": 1.2465691308595497, + "learning_rate": 0.00022367195844365278, + "loss": 2.127, + "step": 4455 + }, + { + "epoch": 2.8766946417043253, + "grad_norm": 1.20271556772611, + "learning_rate": 0.00022361567448164884, + "loss": 1.9348, + "step": 4456 + }, + { + "epoch": 2.8773402194964492, + "grad_norm": 1.287048129204122, + "learning_rate": 0.00022355937686317293, + "loss": 2.1078, + "step": 4457 + }, + { + "epoch": 2.877985797288573, + 
"grad_norm": 1.1942826595985945, + "learning_rate": 0.00022350306559866892, + "loss": 1.9578, + "step": 4458 + }, + { + "epoch": 2.878631375080697, + "grad_norm": 1.3426245333397535, + "learning_rate": 0.0002234467406985831, + "loss": 2.1453, + "step": 4459 + }, + { + "epoch": 2.879276952872821, + "grad_norm": 1.2085039328859306, + "learning_rate": 0.00022339040217336426, + "loss": 1.9986, + "step": 4460 + }, + { + "epoch": 2.879922530664945, + "grad_norm": 1.179359735060249, + "learning_rate": 0.00022333405003346392, + "loss": 2.0629, + "step": 4461 + }, + { + "epoch": 2.8805681084570693, + "grad_norm": 1.1188299783848856, + "learning_rate": 0.0002232776842893359, + "loss": 1.9506, + "step": 4462 + }, + { + "epoch": 2.8812136862491933, + "grad_norm": 1.0913000473365393, + "learning_rate": 0.00022322130495143662, + "loss": 2.0408, + "step": 4463 + }, + { + "epoch": 2.881859264041317, + "grad_norm": 1.0181151795719552, + "learning_rate": 0.00022316491203022508, + "loss": 1.6508, + "step": 4464 + }, + { + "epoch": 2.882504841833441, + "grad_norm": 1.1729062871891691, + "learning_rate": 0.00022310850553616273, + "loss": 2.1828, + "step": 4465 + }, + { + "epoch": 2.883150419625565, + "grad_norm": 1.2036545560868126, + "learning_rate": 0.0002230520854797135, + "loss": 2.0988, + "step": 4466 + }, + { + "epoch": 2.883795997417689, + "grad_norm": 1.2402603861205501, + "learning_rate": 0.00022299565187134404, + "loss": 2.2227, + "step": 4467 + }, + { + "epoch": 2.884441575209813, + "grad_norm": 1.2756723757826878, + "learning_rate": 0.00022293920472152328, + "loss": 2.1219, + "step": 4468 + }, + { + "epoch": 2.885087153001937, + "grad_norm": 1.1437671789542134, + "learning_rate": 0.00022288274404072268, + "loss": 1.959, + "step": 4469 + }, + { + "epoch": 2.885732730794061, + "grad_norm": 1.2221534566586894, + "learning_rate": 0.00022282626983941643, + "loss": 1.9732, + "step": 4470 + }, + { + "epoch": 2.8863783085861847, + "grad_norm": 1.1756707506045423, + "learning_rate": 0.00022276978212808096, + "loss": 2.0933, + "step": 4471 + }, + { + "epoch": 2.8870238863783086, + "grad_norm": 1.3446206790241775, + "learning_rate": 0.00022271328091719544, + "loss": 2.2212, + "step": 4472 + }, + { + "epoch": 2.8876694641704326, + "grad_norm": 1.1276084582645993, + "learning_rate": 0.00022265676621724132, + "loss": 1.713, + "step": 4473 + }, + { + "epoch": 2.8883150419625565, + "grad_norm": 1.1229725932432162, + "learning_rate": 0.00022260023803870273, + "loss": 2.0507, + "step": 4474 + }, + { + "epoch": 2.8889606197546804, + "grad_norm": 1.1828657077551896, + "learning_rate": 0.00022254369639206626, + "loss": 2.0606, + "step": 4475 + }, + { + "epoch": 2.8896061975468044, + "grad_norm": 1.130300507850626, + "learning_rate": 0.00022248714128782088, + "loss": 2.0759, + "step": 4476 + }, + { + "epoch": 2.8902517753389283, + "grad_norm": 1.1745720626525478, + "learning_rate": 0.00022243057273645816, + "loss": 2.0663, + "step": 4477 + }, + { + "epoch": 2.8908973531310522, + "grad_norm": 1.3050951223909932, + "learning_rate": 0.00022237399074847224, + "loss": 2.228, + "step": 4478 + }, + { + "epoch": 2.891542930923176, + "grad_norm": 1.2664461224668844, + "learning_rate": 0.00022231739533435963, + "loss": 1.9311, + "step": 4479 + }, + { + "epoch": 2.8921885087153, + "grad_norm": 1.268875453163053, + "learning_rate": 0.0002222607865046193, + "loss": 2.17, + "step": 4480 + }, + { + "epoch": 2.892834086507424, + "grad_norm": 1.1208448433242635, + "learning_rate": 0.00022220416426975293, + "loss": 1.6832, + "step": 4481 
+ }, + { + "epoch": 2.893479664299548, + "grad_norm": 1.2031702641529312, + "learning_rate": 0.00022214752864026436, + "loss": 2.0777, + "step": 4482 + }, + { + "epoch": 2.894125242091672, + "grad_norm": 1.211544420305167, + "learning_rate": 0.00022209087962666018, + "loss": 1.9284, + "step": 4483 + }, + { + "epoch": 2.894770819883796, + "grad_norm": 1.3507800576657039, + "learning_rate": 0.00022203421723944933, + "loss": 2.1002, + "step": 4484 + }, + { + "epoch": 2.8954163976759197, + "grad_norm": 1.1970627309268458, + "learning_rate": 0.00022197754148914328, + "loss": 2.0069, + "step": 4485 + }, + { + "epoch": 2.8960619754680437, + "grad_norm": 1.1012610559454727, + "learning_rate": 0.00022192085238625608, + "loss": 1.7885, + "step": 4486 + }, + { + "epoch": 2.8967075532601676, + "grad_norm": 1.1874518209177332, + "learning_rate": 0.00022186414994130395, + "loss": 2.0843, + "step": 4487 + }, + { + "epoch": 2.8973531310522915, + "grad_norm": 1.1208094624658584, + "learning_rate": 0.00022180743416480589, + "loss": 1.7186, + "step": 4488 + }, + { + "epoch": 2.897998708844416, + "grad_norm": 1.3404736991381285, + "learning_rate": 0.0002217507050672833, + "loss": 2.1982, + "step": 4489 + }, + { + "epoch": 2.89864428663654, + "grad_norm": 1.2829177354339045, + "learning_rate": 0.00022169396265925997, + "loss": 2.1923, + "step": 4490 + }, + { + "epoch": 2.8992898644286638, + "grad_norm": 1.3140350250257196, + "learning_rate": 0.00022163720695126215, + "loss": 2.1552, + "step": 4491 + }, + { + "epoch": 2.8999354422207877, + "grad_norm": 1.2711425547637194, + "learning_rate": 0.00022158043795381872, + "loss": 2.2482, + "step": 4492 + }, + { + "epoch": 2.9005810200129116, + "grad_norm": 1.3231622662767486, + "learning_rate": 0.00022152365567746087, + "loss": 2.1501, + "step": 4493 + }, + { + "epoch": 2.9012265978050356, + "grad_norm": 1.2950298347292657, + "learning_rate": 0.0002214668601327223, + "loss": 2.3003, + "step": 4494 + }, + { + "epoch": 2.9018721755971595, + "grad_norm": 1.2714908717019704, + "learning_rate": 0.00022141005133013918, + "loss": 1.9448, + "step": 4495 + }, + { + "epoch": 2.9025177533892834, + "grad_norm": 1.2555536263122484, + "learning_rate": 0.0002213532292802501, + "loss": 1.9135, + "step": 4496 + }, + { + "epoch": 2.9031633311814073, + "grad_norm": 1.0635046078508745, + "learning_rate": 0.00022129639399359622, + "loss": 1.7571, + "step": 4497 + }, + { + "epoch": 2.9038089089735313, + "grad_norm": 1.1981434451744832, + "learning_rate": 0.00022123954548072098, + "loss": 2.0994, + "step": 4498 + }, + { + "epoch": 2.904454486765655, + "grad_norm": 1.170121821529632, + "learning_rate": 0.00022118268375217039, + "loss": 2.1229, + "step": 4499 + }, + { + "epoch": 2.905100064557779, + "grad_norm": 1.4495787039485577, + "learning_rate": 0.000221125808818493, + "loss": 1.7429, + "step": 4500 + }, + { + "epoch": 2.905745642349903, + "grad_norm": 1.1471805768724515, + "learning_rate": 0.0002210689206902396, + "loss": 1.8882, + "step": 4501 + }, + { + "epoch": 2.906391220142027, + "grad_norm": 1.1084952015126475, + "learning_rate": 0.00022101201937796346, + "loss": 1.8922, + "step": 4502 + }, + { + "epoch": 2.907036797934151, + "grad_norm": 1.2469169412839283, + "learning_rate": 0.0002209551048922205, + "loss": 2.1605, + "step": 4503 + }, + { + "epoch": 2.907682375726275, + "grad_norm": 1.1178064815544155, + "learning_rate": 0.00022089817724356891, + "loss": 1.976, + "step": 4504 + }, + { + "epoch": 2.9083279535183992, + "grad_norm": 1.2529782040114608, + "learning_rate": 
0.0002208412364425693, + "loss": 2.1132, + "step": 4505 + }, + { + "epoch": 2.908973531310523, + "grad_norm": 1.3103598514353, + "learning_rate": 0.00022078428249978487, + "loss": 2.1451, + "step": 4506 + }, + { + "epoch": 2.909619109102647, + "grad_norm": 1.215499178040414, + "learning_rate": 0.00022072731542578113, + "loss": 2.2734, + "step": 4507 + }, + { + "epoch": 2.910264686894771, + "grad_norm": 1.1802791639631556, + "learning_rate": 0.00022067033523112602, + "loss": 1.9146, + "step": 4508 + }, + { + "epoch": 2.910910264686895, + "grad_norm": 1.2559211982300424, + "learning_rate": 0.00022061334192639002, + "loss": 2.039, + "step": 4509 + }, + { + "epoch": 2.911555842479019, + "grad_norm": 1.1450607055966333, + "learning_rate": 0.0002205563355221459, + "loss": 2.1061, + "step": 4510 + }, + { + "epoch": 2.912201420271143, + "grad_norm": 1.186299812859347, + "learning_rate": 0.00022049931602896908, + "loss": 1.9881, + "step": 4511 + }, + { + "epoch": 2.9128469980632667, + "grad_norm": 1.3391069568521217, + "learning_rate": 0.00022044228345743716, + "loss": 1.8974, + "step": 4512 + }, + { + "epoch": 2.9134925758553907, + "grad_norm": 1.1769105941572533, + "learning_rate": 0.00022038523781813023, + "loss": 1.6668, + "step": 4513 + }, + { + "epoch": 2.9141381536475146, + "grad_norm": 1.2216024920823758, + "learning_rate": 0.00022032817912163102, + "loss": 2.0164, + "step": 4514 + }, + { + "epoch": 2.9147837314396385, + "grad_norm": 1.122810783688427, + "learning_rate": 0.00022027110737852439, + "loss": 2.0053, + "step": 4515 + }, + { + "epoch": 2.9154293092317625, + "grad_norm": 1.215306128453833, + "learning_rate": 0.00022021402259939766, + "loss": 2.0405, + "step": 4516 + }, + { + "epoch": 2.9160748870238864, + "grad_norm": 1.2802670132586, + "learning_rate": 0.0002201569247948408, + "loss": 2.311, + "step": 4517 + }, + { + "epoch": 2.9167204648160103, + "grad_norm": 1.2274825308403148, + "learning_rate": 0.00022009981397544601, + "loss": 1.9041, + "step": 4518 + }, + { + "epoch": 2.9173660426081343, + "grad_norm": 1.2378136951598553, + "learning_rate": 0.0002200426901518079, + "loss": 2.1791, + "step": 4519 + }, + { + "epoch": 2.918011620400258, + "grad_norm": 1.2354112488420415, + "learning_rate": 0.00021998555333452356, + "loss": 2.1238, + "step": 4520 + }, + { + "epoch": 2.918657198192382, + "grad_norm": 1.236811140644281, + "learning_rate": 0.00021992840353419246, + "loss": 1.9591, + "step": 4521 + }, + { + "epoch": 2.919302775984506, + "grad_norm": 1.2623345080178583, + "learning_rate": 0.00021987124076141648, + "loss": 2.0622, + "step": 4522 + }, + { + "epoch": 2.91994835377663, + "grad_norm": 1.26678381082581, + "learning_rate": 0.00021981406502679992, + "loss": 2.2501, + "step": 4523 + }, + { + "epoch": 2.920593931568754, + "grad_norm": 1.1692145372568812, + "learning_rate": 0.0002197568763409494, + "loss": 1.625, + "step": 4524 + }, + { + "epoch": 2.921239509360878, + "grad_norm": 1.218234356606606, + "learning_rate": 0.0002196996747144741, + "loss": 2.066, + "step": 4525 + }, + { + "epoch": 2.9218850871530018, + "grad_norm": 1.373211294243257, + "learning_rate": 0.00021964246015798554, + "loss": 2.0599, + "step": 4526 + }, + { + "epoch": 2.9225306649451257, + "grad_norm": 1.2280693087551515, + "learning_rate": 0.0002195852326820975, + "loss": 2.2094, + "step": 4527 + }, + { + "epoch": 2.9231762427372496, + "grad_norm": 1.387590024445824, + "learning_rate": 0.00021952799229742637, + "loss": 2.4041, + "step": 4528 + }, + { + "epoch": 2.9238218205293736, + "grad_norm": 
1.200801727313604, + "learning_rate": 0.00021947073901459083, + "loss": 2.2285, + "step": 4529 + }, + { + "epoch": 2.9244673983214975, + "grad_norm": 1.0858225121501832, + "learning_rate": 0.00021941347284421185, + "loss": 2.1554, + "step": 4530 + }, + { + "epoch": 2.9251129761136214, + "grad_norm": 0.9994714484253922, + "learning_rate": 0.00021935619379691298, + "loss": 1.5552, + "step": 4531 + }, + { + "epoch": 2.925758553905746, + "grad_norm": 1.175339169348842, + "learning_rate": 0.00021929890188332013, + "loss": 2.0032, + "step": 4532 + }, + { + "epoch": 2.9264041316978697, + "grad_norm": 1.1835236700110157, + "learning_rate": 0.00021924159711406144, + "loss": 1.925, + "step": 4533 + }, + { + "epoch": 2.9270497094899937, + "grad_norm": 1.2298022059284237, + "learning_rate": 0.00021918427949976766, + "loss": 2.1807, + "step": 4534 + }, + { + "epoch": 2.9276952872821176, + "grad_norm": 1.1476562362541085, + "learning_rate": 0.00021912694905107163, + "loss": 1.969, + "step": 4535 + }, + { + "epoch": 2.9283408650742415, + "grad_norm": 1.1927892775261193, + "learning_rate": 0.00021906960577860893, + "loss": 2.2471, + "step": 4536 + }, + { + "epoch": 2.9289864428663654, + "grad_norm": 1.1236029740165705, + "learning_rate": 0.00021901224969301722, + "loss": 1.8663, + "step": 4537 + }, + { + "epoch": 2.9296320206584894, + "grad_norm": 1.0019077134543188, + "learning_rate": 0.00021895488080493666, + "loss": 1.6251, + "step": 4538 + }, + { + "epoch": 2.9302775984506133, + "grad_norm": 1.1802384784225375, + "learning_rate": 0.0002188974991250098, + "loss": 2.1514, + "step": 4539 + }, + { + "epoch": 2.9309231762427372, + "grad_norm": 1.0875411965245576, + "learning_rate": 0.00021884010466388155, + "loss": 1.8952, + "step": 4540 + }, + { + "epoch": 2.931568754034861, + "grad_norm": 1.1439682508496927, + "learning_rate": 0.00021878269743219914, + "loss": 1.9463, + "step": 4541 + }, + { + "epoch": 2.932214331826985, + "grad_norm": 1.2572634069238955, + "learning_rate": 0.00021872527744061222, + "loss": 2.195, + "step": 4542 + }, + { + "epoch": 2.932859909619109, + "grad_norm": 1.2259306184373928, + "learning_rate": 0.00021866784469977283, + "loss": 2.2091, + "step": 4543 + }, + { + "epoch": 2.933505487411233, + "grad_norm": 1.2247701569819165, + "learning_rate": 0.00021861039922033528, + "loss": 2.1363, + "step": 4544 + }, + { + "epoch": 2.934151065203357, + "grad_norm": 1.07730969643725, + "learning_rate": 0.00021855294101295634, + "loss": 2.0586, + "step": 4545 + }, + { + "epoch": 2.934796642995481, + "grad_norm": 1.1869746494632565, + "learning_rate": 0.0002184954700882951, + "loss": 1.9932, + "step": 4546 + }, + { + "epoch": 2.9354422207876048, + "grad_norm": 1.2640940192495438, + "learning_rate": 0.00021843798645701304, + "loss": 2.0143, + "step": 4547 + }, + { + "epoch": 2.936087798579729, + "grad_norm": 1.1492282627720773, + "learning_rate": 0.00021838049012977388, + "loss": 2.0114, + "step": 4548 + }, + { + "epoch": 2.936733376371853, + "grad_norm": 1.2001171076769954, + "learning_rate": 0.00021832298111724383, + "loss": 2.0037, + "step": 4549 + }, + { + "epoch": 2.937378954163977, + "grad_norm": 1.1041865938235567, + "learning_rate": 0.00021826545943009145, + "loss": 1.8202, + "step": 4550 + }, + { + "epoch": 2.938024531956101, + "grad_norm": 1.1827667915106321, + "learning_rate": 0.00021820792507898766, + "loss": 2.0925, + "step": 4551 + }, + { + "epoch": 2.938670109748225, + "grad_norm": 1.1392748043201821, + "learning_rate": 0.00021815037807460552, + "loss": 2.035, + "step": 4552 + }, + 
{ + "epoch": 2.9393156875403488, + "grad_norm": 1.1212087056643723, + "learning_rate": 0.0002180928184276207, + "loss": 1.83, + "step": 4553 + }, + { + "epoch": 2.9399612653324727, + "grad_norm": 1.2750068676270991, + "learning_rate": 0.00021803524614871114, + "loss": 2.0089, + "step": 4554 + }, + { + "epoch": 2.9406068431245966, + "grad_norm": 1.1859249580582822, + "learning_rate": 0.00021797766124855697, + "loss": 1.906, + "step": 4555 + }, + { + "epoch": 2.9412524209167206, + "grad_norm": 1.1972541393466434, + "learning_rate": 0.0002179200637378409, + "loss": 1.9837, + "step": 4556 + }, + { + "epoch": 2.9418979987088445, + "grad_norm": 1.2327984240329808, + "learning_rate": 0.0002178624536272478, + "loss": 2.0864, + "step": 4557 + }, + { + "epoch": 2.9425435765009684, + "grad_norm": 1.0767455139248048, + "learning_rate": 0.00021780483092746504, + "loss": 1.709, + "step": 4558 + }, + { + "epoch": 2.9431891542930924, + "grad_norm": 1.282436862359231, + "learning_rate": 0.00021774719564918215, + "loss": 2.0207, + "step": 4559 + }, + { + "epoch": 2.9438347320852163, + "grad_norm": 1.201672309432074, + "learning_rate": 0.000217689547803091, + "loss": 2.1012, + "step": 4560 + }, + { + "epoch": 2.94448030987734, + "grad_norm": 1.1996689861396588, + "learning_rate": 0.00021763188739988608, + "loss": 2.1851, + "step": 4561 + }, + { + "epoch": 2.945125887669464, + "grad_norm": 1.2241333779014651, + "learning_rate": 0.00021757421445026378, + "loss": 2.0024, + "step": 4562 + }, + { + "epoch": 2.945771465461588, + "grad_norm": 1.2613699457835268, + "learning_rate": 0.00021751652896492311, + "loss": 2.1903, + "step": 4563 + }, + { + "epoch": 2.946417043253712, + "grad_norm": 1.2292877505868676, + "learning_rate": 0.00021745883095456535, + "loss": 2.0322, + "step": 4564 + }, + { + "epoch": 2.947062621045836, + "grad_norm": 1.2199214348922354, + "learning_rate": 0.00021740112042989413, + "loss": 2.0598, + "step": 4565 + }, + { + "epoch": 2.94770819883796, + "grad_norm": 1.245052069768271, + "learning_rate": 0.0002173433974016152, + "loss": 2.0822, + "step": 4566 + }, + { + "epoch": 2.948353776630084, + "grad_norm": 1.213001937589161, + "learning_rate": 0.00021728566188043686, + "loss": 2.039, + "step": 4567 + }, + { + "epoch": 2.9489993544222077, + "grad_norm": 1.1817187771694382, + "learning_rate": 0.0002172279138770697, + "loss": 2.2296, + "step": 4568 + }, + { + "epoch": 2.9496449322143317, + "grad_norm": 1.1186167084479086, + "learning_rate": 0.0002171701534022265, + "loss": 1.8314, + "step": 4569 + }, + { + "epoch": 2.9502905100064556, + "grad_norm": 1.1102601914373282, + "learning_rate": 0.00021711238046662247, + "loss": 1.9157, + "step": 4570 + }, + { + "epoch": 2.9509360877985795, + "grad_norm": 1.1370254381481395, + "learning_rate": 0.00021705459508097506, + "loss": 2.0461, + "step": 4571 + }, + { + "epoch": 2.9515816655907035, + "grad_norm": 1.138379673385808, + "learning_rate": 0.0002169967972560041, + "loss": 1.925, + "step": 4572 + }, + { + "epoch": 2.9522272433828274, + "grad_norm": 1.2104660264109415, + "learning_rate": 0.00021693898700243165, + "loss": 1.9246, + "step": 4573 + }, + { + "epoch": 2.9528728211749513, + "grad_norm": 1.2014281415317523, + "learning_rate": 0.00021688116433098205, + "loss": 2.0668, + "step": 4574 + }, + { + "epoch": 2.9535183989670757, + "grad_norm": 1.2038382211485448, + "learning_rate": 0.00021682332925238213, + "loss": 1.9436, + "step": 4575 + }, + { + "epoch": 2.9541639767591996, + "grad_norm": 1.1343938108613287, + "learning_rate": 0.0002167654817773609, 
+ "loss": 1.9075, + "step": 4576 + }, + { + "epoch": 2.9548095545513235, + "grad_norm": 1.2080311164368494, + "learning_rate": 0.0002167076219166495, + "loss": 2.0419, + "step": 4577 + }, + { + "epoch": 2.9554551323434475, + "grad_norm": 1.2432862598956047, + "learning_rate": 0.00021664974968098175, + "loss": 1.978, + "step": 4578 + }, + { + "epoch": 2.9561007101355714, + "grad_norm": 1.3535653845704827, + "learning_rate": 0.0002165918650810934, + "loss": 1.9997, + "step": 4579 + }, + { + "epoch": 2.9567462879276953, + "grad_norm": 1.1925774115256236, + "learning_rate": 0.00021653396812772272, + "loss": 2.0515, + "step": 4580 + }, + { + "epoch": 2.9573918657198193, + "grad_norm": 1.136394921876915, + "learning_rate": 0.00021647605883161013, + "loss": 1.9071, + "step": 4581 + }, + { + "epoch": 2.958037443511943, + "grad_norm": 1.2005133978341167, + "learning_rate": 0.00021641813720349847, + "loss": 2.2197, + "step": 4582 + }, + { + "epoch": 2.958683021304067, + "grad_norm": 1.1364174267805496, + "learning_rate": 0.00021636020325413279, + "loss": 2.0299, + "step": 4583 + }, + { + "epoch": 2.959328599096191, + "grad_norm": 1.2731867513435315, + "learning_rate": 0.0002163022569942604, + "loss": 1.8742, + "step": 4584 + }, + { + "epoch": 2.959974176888315, + "grad_norm": 1.136335919789458, + "learning_rate": 0.00021624429843463091, + "loss": 2.1017, + "step": 4585 + }, + { + "epoch": 2.960619754680439, + "grad_norm": 1.1535003677304552, + "learning_rate": 0.00021618632758599638, + "loss": 1.9111, + "step": 4586 + }, + { + "epoch": 2.961265332472563, + "grad_norm": 1.2493666950820348, + "learning_rate": 0.00021612834445911087, + "loss": 1.984, + "step": 4587 + }, + { + "epoch": 2.961910910264687, + "grad_norm": 1.16998884961751, + "learning_rate": 0.00021607034906473088, + "loss": 2.0296, + "step": 4588 + }, + { + "epoch": 2.9625564880568107, + "grad_norm": 1.0726090054906485, + "learning_rate": 0.00021601234141361517, + "loss": 1.6838, + "step": 4589 + }, + { + "epoch": 2.9632020658489346, + "grad_norm": 1.217874451042311, + "learning_rate": 0.0002159543215165248, + "loss": 2.1254, + "step": 4590 + }, + { + "epoch": 2.963847643641059, + "grad_norm": 1.167526548888025, + "learning_rate": 0.00021589628938422295, + "loss": 1.9992, + "step": 4591 + }, + { + "epoch": 2.964493221433183, + "grad_norm": 1.1572219391594172, + "learning_rate": 0.0002158382450274753, + "loss": 1.8812, + "step": 4592 + }, + { + "epoch": 2.965138799225307, + "grad_norm": 1.189537186600458, + "learning_rate": 0.00021578018845704963, + "loss": 1.943, + "step": 4593 + }, + { + "epoch": 2.965784377017431, + "grad_norm": 1.1693764920088274, + "learning_rate": 0.00021572211968371604, + "loss": 2.2432, + "step": 4594 + }, + { + "epoch": 2.9664299548095547, + "grad_norm": 1.2558647694042464, + "learning_rate": 0.0002156640387182469, + "loss": 2.2766, + "step": 4595 + }, + { + "epoch": 2.9670755326016787, + "grad_norm": 1.1755092147596973, + "learning_rate": 0.00021560594557141683, + "loss": 1.9216, + "step": 4596 + }, + { + "epoch": 2.9677211103938026, + "grad_norm": 1.2850604367474376, + "learning_rate": 0.0002155478402540027, + "loss": 2.15, + "step": 4597 + }, + { + "epoch": 2.9683666881859265, + "grad_norm": 1.0778777072920955, + "learning_rate": 0.00021548972277678365, + "loss": 1.9935, + "step": 4598 + }, + { + "epoch": 2.9690122659780505, + "grad_norm": 1.1922468138176, + "learning_rate": 0.00021543159315054106, + "loss": 2.16, + "step": 4599 + }, + { + "epoch": 2.9696578437701744, + "grad_norm": 1.1637235025801096, + 
"learning_rate": 0.00021537345138605862, + "loss": 1.9211, + "step": 4600 + }, + { + "epoch": 2.9703034215622983, + "grad_norm": 1.2940425686971724, + "learning_rate": 0.0002153152974941223, + "loss": 1.9975, + "step": 4601 + }, + { + "epoch": 2.9709489993544222, + "grad_norm": 1.1093304533574613, + "learning_rate": 0.00021525713148552004, + "loss": 1.9469, + "step": 4602 + }, + { + "epoch": 2.971594577146546, + "grad_norm": 1.1703852138166115, + "learning_rate": 0.0002151989533710424, + "loss": 2.0231, + "step": 4603 + }, + { + "epoch": 2.97224015493867, + "grad_norm": 1.1121803874745444, + "learning_rate": 0.000215140763161482, + "loss": 1.9632, + "step": 4604 + }, + { + "epoch": 2.972885732730794, + "grad_norm": 1.2596586946609971, + "learning_rate": 0.00021508256086763368, + "loss": 2.1814, + "step": 4605 + }, + { + "epoch": 2.973531310522918, + "grad_norm": 1.250341094586115, + "learning_rate": 0.00021502434650029468, + "loss": 2.1639, + "step": 4606 + }, + { + "epoch": 2.974176888315042, + "grad_norm": 1.1568455755514995, + "learning_rate": 0.00021496612007026423, + "loss": 2.3414, + "step": 4607 + }, + { + "epoch": 2.974822466107166, + "grad_norm": 1.1606422365820466, + "learning_rate": 0.000214907881588344, + "loss": 1.9765, + "step": 4608 + }, + { + "epoch": 2.9754680438992898, + "grad_norm": 1.0204044800306555, + "learning_rate": 0.00021484963106533787, + "loss": 1.8148, + "step": 4609 + }, + { + "epoch": 2.9761136216914137, + "grad_norm": 1.1520702992765495, + "learning_rate": 0.00021479136851205183, + "loss": 2.0793, + "step": 4610 + }, + { + "epoch": 2.9767591994835376, + "grad_norm": 1.1653341023537778, + "learning_rate": 0.0002147330939392943, + "loss": 2.1231, + "step": 4611 + }, + { + "epoch": 2.9774047772756616, + "grad_norm": 1.2349202136427482, + "learning_rate": 0.00021467480735787574, + "loss": 1.9373, + "step": 4612 + }, + { + "epoch": 2.9780503550677855, + "grad_norm": 1.259858417421424, + "learning_rate": 0.00021461650877860886, + "loss": 2.1907, + "step": 4613 + }, + { + "epoch": 2.9786959328599094, + "grad_norm": 1.1730455978524723, + "learning_rate": 0.00021455819821230882, + "loss": 1.9824, + "step": 4614 + }, + { + "epoch": 2.9793415106520333, + "grad_norm": 1.1909633129892963, + "learning_rate": 0.00021449987566979275, + "loss": 1.8628, + "step": 4615 + }, + { + "epoch": 2.9799870884441573, + "grad_norm": 1.157181041914165, + "learning_rate": 0.00021444154116187999, + "loss": 2.0829, + "step": 4616 + }, + { + "epoch": 2.980632666236281, + "grad_norm": 1.1248192959276047, + "learning_rate": 0.00021438319469939233, + "loss": 1.5627, + "step": 4617 + }, + { + "epoch": 2.9812782440284056, + "grad_norm": 1.1147593499516912, + "learning_rate": 0.0002143248362931536, + "loss": 1.7151, + "step": 4618 + }, + { + "epoch": 2.9819238218205295, + "grad_norm": 1.299932078569392, + "learning_rate": 0.0002142664659539899, + "loss": 2.0532, + "step": 4619 + }, + { + "epoch": 2.9825693996126534, + "grad_norm": 1.0787363970525732, + "learning_rate": 0.0002142080836927295, + "loss": 1.6184, + "step": 4620 + }, + { + "epoch": 2.9832149774047774, + "grad_norm": 1.2662395279330647, + "learning_rate": 0.00021414968952020294, + "loss": 2.1269, + "step": 4621 + }, + { + "epoch": 2.9838605551969013, + "grad_norm": 1.2729096349432518, + "learning_rate": 0.000214091283447243, + "loss": 2.1682, + "step": 4622 + }, + { + "epoch": 2.9845061329890252, + "grad_norm": 1.1368135332332776, + "learning_rate": 0.0002140328654846845, + "loss": 2.0384, + "step": 4623 + }, + { + "epoch": 
2.985151710781149, + "grad_norm": 1.2485387412049622, + "learning_rate": 0.0002139744356433646, + "loss": 2.0726, + "step": 4624 + }, + { + "epoch": 2.985797288573273, + "grad_norm": 1.1860233414026171, + "learning_rate": 0.00021391599393412274, + "loss": 2.0233, + "step": 4625 + }, + { + "epoch": 2.986442866365397, + "grad_norm": 1.1844289880605812, + "learning_rate": 0.00021385754036780044, + "loss": 1.6076, + "step": 4626 + }, + { + "epoch": 2.987088444157521, + "grad_norm": 1.1227460616675757, + "learning_rate": 0.00021379907495524132, + "loss": 1.5593, + "step": 4627 + }, + { + "epoch": 2.987734021949645, + "grad_norm": 1.1867718363390158, + "learning_rate": 0.00021374059770729145, + "loss": 1.8718, + "step": 4628 + }, + { + "epoch": 2.988379599741769, + "grad_norm": 1.0957363102646795, + "learning_rate": 0.0002136821086347989, + "loss": 1.8827, + "step": 4629 + }, + { + "epoch": 2.9890251775338927, + "grad_norm": 1.2188708646757338, + "learning_rate": 0.00021362360774861405, + "loss": 1.63, + "step": 4630 + }, + { + "epoch": 2.9896707553260167, + "grad_norm": 1.222363088694996, + "learning_rate": 0.00021356509505958938, + "loss": 2.2072, + "step": 4631 + }, + { + "epoch": 2.9903163331181406, + "grad_norm": 1.1607569373845938, + "learning_rate": 0.0002135065705785796, + "loss": 1.9902, + "step": 4632 + }, + { + "epoch": 2.9909619109102645, + "grad_norm": 1.4044326026939273, + "learning_rate": 0.00021344803431644165, + "loss": 1.879, + "step": 4633 + }, + { + "epoch": 2.991607488702389, + "grad_norm": 1.2568290587914215, + "learning_rate": 0.0002133894862840346, + "loss": 2.1362, + "step": 4634 + }, + { + "epoch": 2.992253066494513, + "grad_norm": 1.2034898829949199, + "learning_rate": 0.0002133309264922197, + "loss": 2.1784, + "step": 4635 + }, + { + "epoch": 2.9928986442866368, + "grad_norm": 1.1987340509625235, + "learning_rate": 0.00021327235495186038, + "loss": 2.0268, + "step": 4636 + }, + { + "epoch": 2.9935442220787607, + "grad_norm": 1.1668351137335682, + "learning_rate": 0.00021321377167382236, + "loss": 1.7946, + "step": 4637 + }, + { + "epoch": 2.9941897998708846, + "grad_norm": 1.1906251467539708, + "learning_rate": 0.00021315517666897326, + "loss": 2.023, + "step": 4638 + }, + { + "epoch": 2.9948353776630086, + "grad_norm": 1.234710620502495, + "learning_rate": 0.0002130965699481833, + "loss": 1.8642, + "step": 4639 + }, + { + "epoch": 2.9954809554551325, + "grad_norm": 1.3069727275905405, + "learning_rate": 0.0002130379515223245, + "loss": 2.1987, + "step": 4640 + }, + { + "epoch": 2.9961265332472564, + "grad_norm": 1.1317175907739294, + "learning_rate": 0.00021297932140227117, + "loss": 2.1455, + "step": 4641 + }, + { + "epoch": 2.9967721110393803, + "grad_norm": 1.1444096607891112, + "learning_rate": 0.00021292067959889985, + "loss": 2.0317, + "step": 4642 + }, + { + "epoch": 2.9974176888315043, + "grad_norm": 1.1092387892460525, + "learning_rate": 0.0002128620261230892, + "loss": 1.8804, + "step": 4643 + }, + { + "epoch": 2.998063266623628, + "grad_norm": 1.1100568870591343, + "learning_rate": 0.00021280336098572004, + "loss": 1.9899, + "step": 4644 + }, + { + "epoch": 2.998708844415752, + "grad_norm": 1.1468612854710047, + "learning_rate": 0.00021274468419767538, + "loss": 2.0196, + "step": 4645 + }, + { + "epoch": 2.999354422207876, + "grad_norm": 1.1456751271099512, + "learning_rate": 0.00021268599576984028, + "loss": 1.9904, + "step": 4646 + }, + { + "epoch": 3.0, + "grad_norm": 1.2590579718068213, + "learning_rate": 0.00021262729571310226, + "loss": 2.0438, + 
"step": 4647 + }, + { + "epoch": 3.0, + "eval_loss": 2.105350971221924, + "eval_runtime": 58.4037, + "eval_samples_per_second": 5.941, + "eval_steps_per_second": 5.941, + "step": 4647 + }, + { + "epoch": 3.000645577792124, + "grad_norm": 1.0050507348593074, + "learning_rate": 0.00021256858403835062, + "loss": 1.2462, + "step": 4648 + }, + { + "epoch": 3.001291155584248, + "grad_norm": 0.9535209528002351, + "learning_rate": 0.00021250986075647697, + "loss": 1.2426, + "step": 4649 + }, + { + "epoch": 3.001936733376372, + "grad_norm": 1.1971119767833431, + "learning_rate": 0.0002124511258783752, + "loss": 1.7707, + "step": 4650 + }, + { + "epoch": 3.0025823111684957, + "grad_norm": 1.2624712464656862, + "learning_rate": 0.0002123923794149412, + "loss": 1.8664, + "step": 4651 + }, + { + "epoch": 3.0032278889606197, + "grad_norm": 1.2090223629055328, + "learning_rate": 0.00021233362137707297, + "loss": 1.8565, + "step": 4652 + }, + { + "epoch": 3.0038734667527436, + "grad_norm": 0.9172416984474622, + "learning_rate": 0.00021227485177567084, + "loss": 0.9881, + "step": 4653 + }, + { + "epoch": 3.0045190445448675, + "grad_norm": 1.405519326846389, + "learning_rate": 0.00021221607062163713, + "loss": 1.7664, + "step": 4654 + }, + { + "epoch": 3.0051646223369914, + "grad_norm": 1.703010677748229, + "learning_rate": 0.00021215727792587636, + "loss": 1.8263, + "step": 4655 + }, + { + "epoch": 3.0058102001291154, + "grad_norm": 1.1697788943859349, + "learning_rate": 0.0002120984736992952, + "loss": 1.4126, + "step": 4656 + }, + { + "epoch": 3.0064557779212393, + "grad_norm": 1.39939309495812, + "learning_rate": 0.00021203965795280245, + "loss": 1.6484, + "step": 4657 + }, + { + "epoch": 3.0071013557133637, + "grad_norm": 1.3716827611669329, + "learning_rate": 0.00021198083069730902, + "loss": 1.5755, + "step": 4658 + }, + { + "epoch": 3.0077469335054876, + "grad_norm": 1.2984290308611237, + "learning_rate": 0.00021192199194372798, + "loss": 1.6701, + "step": 4659 + }, + { + "epoch": 3.0083925112976115, + "grad_norm": 1.356490954314649, + "learning_rate": 0.0002118631417029745, + "loss": 1.7364, + "step": 4660 + }, + { + "epoch": 3.0090380890897355, + "grad_norm": 1.2005681643226307, + "learning_rate": 0.000211804279985966, + "loss": 1.6219, + "step": 4661 + }, + { + "epoch": 3.0096836668818594, + "grad_norm": 1.2857181845546413, + "learning_rate": 0.00021174540680362188, + "loss": 1.5984, + "step": 4662 + }, + { + "epoch": 3.0103292446739833, + "grad_norm": 1.0806037095785557, + "learning_rate": 0.0002116865221668637, + "loss": 1.3196, + "step": 4663 + }, + { + "epoch": 3.0109748224661073, + "grad_norm": 1.228898195926363, + "learning_rate": 0.00021162762608661518, + "loss": 1.9028, + "step": 4664 + }, + { + "epoch": 3.011620400258231, + "grad_norm": 1.0768286675960397, + "learning_rate": 0.00021156871857380227, + "loss": 1.3835, + "step": 4665 + }, + { + "epoch": 3.012265978050355, + "grad_norm": 1.4418437429071858, + "learning_rate": 0.00021150979963935272, + "loss": 1.6763, + "step": 4666 + }, + { + "epoch": 3.012911555842479, + "grad_norm": 1.250398812092016, + "learning_rate": 0.0002114508692941968, + "loss": 1.8385, + "step": 4667 + }, + { + "epoch": 3.013557133634603, + "grad_norm": 1.2299771074188086, + "learning_rate": 0.00021139192754926657, + "loss": 1.7412, + "step": 4668 + }, + { + "epoch": 3.014202711426727, + "grad_norm": 1.1926705218571427, + "learning_rate": 0.00021133297441549643, + "loss": 1.4936, + "step": 4669 + }, + { + "epoch": 3.014848289218851, + "grad_norm": 
1.2597206592231602, + "learning_rate": 0.00021127400990382273, + "loss": 1.7088, + "step": 4670 + }, + { + "epoch": 3.0154938670109748, + "grad_norm": 1.3521002276479583, + "learning_rate": 0.00021121503402518393, + "loss": 1.5996, + "step": 4671 + }, + { + "epoch": 3.0161394448030987, + "grad_norm": 1.299911078344437, + "learning_rate": 0.00021115604679052092, + "loss": 1.6515, + "step": 4672 + }, + { + "epoch": 3.0167850225952226, + "grad_norm": 1.2623091720373039, + "learning_rate": 0.00021109704821077615, + "loss": 1.3962, + "step": 4673 + }, + { + "epoch": 3.0174306003873466, + "grad_norm": 1.316269058103647, + "learning_rate": 0.00021103803829689463, + "loss": 1.7691, + "step": 4674 + }, + { + "epoch": 3.0180761781794705, + "grad_norm": 1.2579963556948814, + "learning_rate": 0.0002109790170598233, + "loss": 1.5034, + "step": 4675 + }, + { + "epoch": 3.0187217559715944, + "grad_norm": 1.4838210766557303, + "learning_rate": 0.00021091998451051123, + "loss": 1.7984, + "step": 4676 + }, + { + "epoch": 3.0193673337637184, + "grad_norm": 1.2552166555959026, + "learning_rate": 0.0002108609406599095, + "loss": 1.5025, + "step": 4677 + }, + { + "epoch": 3.0200129115558423, + "grad_norm": 1.2293409210464665, + "learning_rate": 0.00021080188551897137, + "loss": 1.3541, + "step": 4678 + }, + { + "epoch": 3.020658489347966, + "grad_norm": 1.4768215591621097, + "learning_rate": 0.00021074281909865228, + "loss": 1.8408, + "step": 4679 + }, + { + "epoch": 3.0213040671400906, + "grad_norm": 1.5573237867552001, + "learning_rate": 0.00021068374140990952, + "loss": 2.0205, + "step": 4680 + }, + { + "epoch": 3.0219496449322145, + "grad_norm": 1.3668067786006535, + "learning_rate": 0.0002106246524637027, + "loss": 1.7613, + "step": 4681 + }, + { + "epoch": 3.0225952227243384, + "grad_norm": 1.1936391271802775, + "learning_rate": 0.0002105655522709934, + "loss": 1.5404, + "step": 4682 + }, + { + "epoch": 3.0232408005164624, + "grad_norm": 1.150613018474057, + "learning_rate": 0.0002105064408427454, + "loss": 1.3233, + "step": 4683 + }, + { + "epoch": 3.0238863783085863, + "grad_norm": 1.384734224640452, + "learning_rate": 0.0002104473181899244, + "loss": 1.8603, + "step": 4684 + }, + { + "epoch": 3.0245319561007102, + "grad_norm": 1.165223092496318, + "learning_rate": 0.00021038818432349822, + "loss": 1.5525, + "step": 4685 + }, + { + "epoch": 3.025177533892834, + "grad_norm": 1.301245968960201, + "learning_rate": 0.0002103290392544369, + "loss": 1.7611, + "step": 4686 + }, + { + "epoch": 3.025823111684958, + "grad_norm": 1.2495722252060009, + "learning_rate": 0.00021026988299371248, + "loss": 1.4391, + "step": 4687 + }, + { + "epoch": 3.026468689477082, + "grad_norm": 1.1290417140021052, + "learning_rate": 0.00021021071555229895, + "loss": 1.4153, + "step": 4688 + }, + { + "epoch": 3.027114267269206, + "grad_norm": 1.2375329985576708, + "learning_rate": 0.00021015153694117262, + "loss": 1.578, + "step": 4689 + }, + { + "epoch": 3.02775984506133, + "grad_norm": 1.1549373010493529, + "learning_rate": 0.00021009234717131166, + "loss": 1.2973, + "step": 4690 + }, + { + "epoch": 3.028405422853454, + "grad_norm": 1.411633826320661, + "learning_rate": 0.00021003314625369634, + "loss": 1.6492, + "step": 4691 + }, + { + "epoch": 3.0290510006455778, + "grad_norm": 1.5897780952703808, + "learning_rate": 0.00020997393419930913, + "loss": 1.7161, + "step": 4692 + }, + { + "epoch": 3.0296965784377017, + "grad_norm": 1.3540142151630639, + "learning_rate": 0.00020991471101913445, + "loss": 1.6854, + "step": 4693 + }, + { 
+ "epoch": 3.0303421562298256, + "grad_norm": 1.2055876162119679, + "learning_rate": 0.00020985547672415882, + "loss": 1.4189, + "step": 4694 + }, + { + "epoch": 3.0309877340219495, + "grad_norm": 1.4254087414165184, + "learning_rate": 0.00020979623132537083, + "loss": 1.889, + "step": 4695 + }, + { + "epoch": 3.0316333118140735, + "grad_norm": 1.4405369898436402, + "learning_rate": 0.00020973697483376105, + "loss": 1.8274, + "step": 4696 + }, + { + "epoch": 3.0322788896061974, + "grad_norm": 1.342161039149646, + "learning_rate": 0.0002096777072603223, + "loss": 1.6323, + "step": 4697 + }, + { + "epoch": 3.0329244673983213, + "grad_norm": 1.335345433818146, + "learning_rate": 0.00020961842861604919, + "loss": 1.6388, + "step": 4698 + }, + { + "epoch": 3.0335700451904453, + "grad_norm": 1.241939886501526, + "learning_rate": 0.00020955913891193857, + "loss": 1.7683, + "step": 4699 + }, + { + "epoch": 3.034215622982569, + "grad_norm": 1.2725825476028554, + "learning_rate": 0.00020949983815898935, + "loss": 1.5629, + "step": 4700 + }, + { + "epoch": 3.0348612007746936, + "grad_norm": 1.2013439730097233, + "learning_rate": 0.00020944052636820244, + "loss": 1.393, + "step": 4701 + }, + { + "epoch": 3.0355067785668175, + "grad_norm": 1.1229013790092615, + "learning_rate": 0.0002093812035505807, + "loss": 1.5021, + "step": 4702 + }, + { + "epoch": 3.0361523563589414, + "grad_norm": 1.3510318168095694, + "learning_rate": 0.00020932186971712918, + "loss": 1.6581, + "step": 4703 + }, + { + "epoch": 3.0367979341510654, + "grad_norm": 1.179567478290489, + "learning_rate": 0.00020926252487885491, + "loss": 1.5036, + "step": 4704 + }, + { + "epoch": 3.0374435119431893, + "grad_norm": 1.3259510814126712, + "learning_rate": 0.00020920316904676702, + "loss": 1.9767, + "step": 4705 + }, + { + "epoch": 3.038089089735313, + "grad_norm": 1.4591124128290418, + "learning_rate": 0.00020914380223187662, + "loss": 1.9265, + "step": 4706 + }, + { + "epoch": 3.038734667527437, + "grad_norm": 1.3063511738078901, + "learning_rate": 0.00020908442444519674, + "loss": 1.4262, + "step": 4707 + }, + { + "epoch": 3.039380245319561, + "grad_norm": 1.2630582895982654, + "learning_rate": 0.0002090250356977428, + "loss": 1.5233, + "step": 4708 + }, + { + "epoch": 3.040025823111685, + "grad_norm": 1.5607997033491516, + "learning_rate": 0.0002089656360005319, + "loss": 2.0823, + "step": 4709 + }, + { + "epoch": 3.040671400903809, + "grad_norm": 1.3854419172075505, + "learning_rate": 0.00020890622536458325, + "loss": 1.877, + "step": 4710 + }, + { + "epoch": 3.041316978695933, + "grad_norm": 1.274012596985811, + "learning_rate": 0.00020884680380091827, + "loss": 1.643, + "step": 4711 + }, + { + "epoch": 3.041962556488057, + "grad_norm": 1.2664902600302985, + "learning_rate": 0.0002087873713205602, + "loss": 1.6252, + "step": 4712 + }, + { + "epoch": 3.0426081342801807, + "grad_norm": 1.2594229312006997, + "learning_rate": 0.00020872792793453436, + "loss": 1.8814, + "step": 4713 + }, + { + "epoch": 3.0432537120723047, + "grad_norm": 1.4246858653106373, + "learning_rate": 0.0002086684736538682, + "loss": 2.0128, + "step": 4714 + }, + { + "epoch": 3.0438992898644286, + "grad_norm": 1.4002765120619336, + "learning_rate": 0.000208609008489591, + "loss": 1.7683, + "step": 4715 + }, + { + "epoch": 3.0445448676565525, + "grad_norm": 1.245090636748946, + "learning_rate": 0.00020854953245273429, + "loss": 1.6976, + "step": 4716 + }, + { + "epoch": 3.0451904454486765, + "grad_norm": 1.1309979957120029, + "learning_rate": 
0.00020849004555433136, + "loss": 1.3545, + "step": 4717 + }, + { + "epoch": 3.0458360232408004, + "grad_norm": 1.2769040759764223, + "learning_rate": 0.00020843054780541776, + "loss": 1.5255, + "step": 4718 + }, + { + "epoch": 3.0464816010329243, + "grad_norm": 1.3406265566184228, + "learning_rate": 0.00020837103921703086, + "loss": 1.8, + "step": 4719 + }, + { + "epoch": 3.0471271788250482, + "grad_norm": 1.4944652439560313, + "learning_rate": 0.00020831151980021018, + "loss": 1.9864, + "step": 4720 + }, + { + "epoch": 3.047772756617172, + "grad_norm": 1.335777624830681, + "learning_rate": 0.0002082519895659971, + "loss": 1.7363, + "step": 4721 + }, + { + "epoch": 3.048418334409296, + "grad_norm": 1.238014264126393, + "learning_rate": 0.00020819244852543525, + "loss": 1.4517, + "step": 4722 + }, + { + "epoch": 3.0490639122014205, + "grad_norm": 1.4669009896045218, + "learning_rate": 0.00020813289668956996, + "loss": 1.5984, + "step": 4723 + }, + { + "epoch": 3.0497094899935444, + "grad_norm": 1.257345686939191, + "learning_rate": 0.00020807333406944875, + "loss": 1.6722, + "step": 4724 + }, + { + "epoch": 3.0503550677856683, + "grad_norm": 1.2922727537014256, + "learning_rate": 0.00020801376067612116, + "loss": 1.4353, + "step": 4725 + }, + { + "epoch": 3.0510006455777923, + "grad_norm": 1.4024517331165631, + "learning_rate": 0.00020795417652063865, + "loss": 1.8836, + "step": 4726 + }, + { + "epoch": 3.051646223369916, + "grad_norm": 1.354119654693712, + "learning_rate": 0.0002078945816140547, + "loss": 1.5891, + "step": 4727 + }, + { + "epoch": 3.05229180116204, + "grad_norm": 1.2709105795912534, + "learning_rate": 0.00020783497596742472, + "loss": 1.3527, + "step": 4728 + }, + { + "epoch": 3.052937378954164, + "grad_norm": 1.3571408313653983, + "learning_rate": 0.00020777535959180628, + "loss": 1.6509, + "step": 4729 + }, + { + "epoch": 3.053582956746288, + "grad_norm": 1.2875598058075912, + "learning_rate": 0.00020771573249825877, + "loss": 1.6762, + "step": 4730 + }, + { + "epoch": 3.054228534538412, + "grad_norm": 1.498998826521538, + "learning_rate": 0.00020765609469784366, + "loss": 1.9905, + "step": 4731 + }, + { + "epoch": 3.054874112330536, + "grad_norm": 1.4862659761984218, + "learning_rate": 0.0002075964462016243, + "loss": 1.7238, + "step": 4732 + }, + { + "epoch": 3.05551969012266, + "grad_norm": 1.3307988464929683, + "learning_rate": 0.00020753678702066633, + "loss": 1.666, + "step": 4733 + }, + { + "epoch": 3.0561652679147837, + "grad_norm": 1.3637794177951175, + "learning_rate": 0.0002074771171660369, + "loss": 1.606, + "step": 4734 + }, + { + "epoch": 3.0568108457069076, + "grad_norm": 1.3257189190744985, + "learning_rate": 0.0002074174366488055, + "loss": 1.5982, + "step": 4735 + }, + { + "epoch": 3.0574564234990316, + "grad_norm": 1.2433311001027942, + "learning_rate": 0.00020735774548004345, + "loss": 1.5355, + "step": 4736 + }, + { + "epoch": 3.0581020012911555, + "grad_norm": 1.2051106900310287, + "learning_rate": 0.00020729804367082414, + "loss": 1.5971, + "step": 4737 + }, + { + "epoch": 3.0587475790832794, + "grad_norm": 1.0855407218941202, + "learning_rate": 0.00020723833123222277, + "loss": 1.3101, + "step": 4738 + }, + { + "epoch": 3.0593931568754034, + "grad_norm": 1.4835151229746213, + "learning_rate": 0.00020717860817531675, + "loss": 1.6427, + "step": 4739 + }, + { + "epoch": 3.0600387346675273, + "grad_norm": 1.577332927227798, + "learning_rate": 0.0002071188745111852, + "loss": 1.6538, + "step": 4740 + }, + { + "epoch": 3.0606843124596512, + 
"grad_norm": 1.376474834886469, + "learning_rate": 0.00020705913025090947, + "loss": 1.7491, + "step": 4741 + }, + { + "epoch": 3.061329890251775, + "grad_norm": 1.439885666939585, + "learning_rate": 0.00020699937540557262, + "loss": 1.8692, + "step": 4742 + }, + { + "epoch": 3.061975468043899, + "grad_norm": 1.399436936773167, + "learning_rate": 0.00020693960998625986, + "loss": 1.7446, + "step": 4743 + }, + { + "epoch": 3.062621045836023, + "grad_norm": 1.4582409817588373, + "learning_rate": 0.00020687983400405826, + "loss": 1.9175, + "step": 4744 + }, + { + "epoch": 3.0632666236281474, + "grad_norm": 1.3473325404691145, + "learning_rate": 0.0002068200474700569, + "loss": 1.8655, + "step": 4745 + }, + { + "epoch": 3.0639122014202713, + "grad_norm": 1.366400667132601, + "learning_rate": 0.00020676025039534677, + "loss": 1.7691, + "step": 4746 + }, + { + "epoch": 3.0645577792123952, + "grad_norm": 1.298694602430001, + "learning_rate": 0.00020670044279102098, + "loss": 1.5073, + "step": 4747 + }, + { + "epoch": 3.065203357004519, + "grad_norm": 1.1038594106535693, + "learning_rate": 0.0002066406246681743, + "loss": 1.355, + "step": 4748 + }, + { + "epoch": 3.065848934796643, + "grad_norm": 1.4209840809180248, + "learning_rate": 0.00020658079603790366, + "loss": 1.7438, + "step": 4749 + }, + { + "epoch": 3.066494512588767, + "grad_norm": 1.5703281455030167, + "learning_rate": 0.00020652095691130795, + "loss": 1.8983, + "step": 4750 + }, + { + "epoch": 3.067140090380891, + "grad_norm": 1.2518560570661772, + "learning_rate": 0.00020646110729948794, + "loss": 1.8328, + "step": 4751 + }, + { + "epoch": 3.067785668173015, + "grad_norm": 1.3729231390439505, + "learning_rate": 0.0002064012472135462, + "loss": 1.9918, + "step": 4752 + }, + { + "epoch": 3.068431245965139, + "grad_norm": 1.2997782632607968, + "learning_rate": 0.0002063413766645876, + "loss": 1.7181, + "step": 4753 + }, + { + "epoch": 3.0690768237572628, + "grad_norm": 1.0450613036742336, + "learning_rate": 0.00020628149566371865, + "loss": 1.2983, + "step": 4754 + }, + { + "epoch": 3.0697224015493867, + "grad_norm": 1.2809102766739067, + "learning_rate": 0.00020622160422204794, + "loss": 1.7876, + "step": 4755 + }, + { + "epoch": 3.0703679793415106, + "grad_norm": 1.259288799476463, + "learning_rate": 0.00020616170235068595, + "loss": 1.4341, + "step": 4756 + }, + { + "epoch": 3.0710135571336346, + "grad_norm": 1.3219607746277657, + "learning_rate": 0.000206101790060745, + "loss": 1.6523, + "step": 4757 + }, + { + "epoch": 3.0716591349257585, + "grad_norm": 1.3329184913328007, + "learning_rate": 0.0002060418673633396, + "loss": 1.6389, + "step": 4758 + }, + { + "epoch": 3.0723047127178824, + "grad_norm": 1.1935181839142228, + "learning_rate": 0.00020598193426958592, + "loss": 1.4606, + "step": 4759 + }, + { + "epoch": 3.0729502905100063, + "grad_norm": 1.314953812022364, + "learning_rate": 0.0002059219907906022, + "loss": 1.6165, + "step": 4760 + }, + { + "epoch": 3.0735958683021303, + "grad_norm": 1.4457829360119505, + "learning_rate": 0.00020586203693750857, + "loss": 1.7823, + "step": 4761 + }, + { + "epoch": 3.074241446094254, + "grad_norm": 1.246043640173344, + "learning_rate": 0.00020580207272142715, + "loss": 1.365, + "step": 4762 + }, + { + "epoch": 3.074887023886378, + "grad_norm": 1.4663170376748706, + "learning_rate": 0.00020574209815348184, + "loss": 1.7607, + "step": 4763 + }, + { + "epoch": 3.075532601678502, + "grad_norm": 1.401476940171712, + "learning_rate": 0.0002056821132447986, + "loss": 1.6894, + "step": 4764 + }, 
+ { + "epoch": 3.0761781794706264, + "grad_norm": 1.32591052724875, + "learning_rate": 0.00020562211800650523, + "loss": 1.6597, + "step": 4765 + }, + { + "epoch": 3.0768237572627504, + "grad_norm": 1.4583648446772584, + "learning_rate": 0.00020556211244973147, + "loss": 1.7202, + "step": 4766 + }, + { + "epoch": 3.0774693350548743, + "grad_norm": 1.3346863538283642, + "learning_rate": 0.000205502096585609, + "loss": 1.5499, + "step": 4767 + }, + { + "epoch": 3.0781149128469982, + "grad_norm": 1.367708905279769, + "learning_rate": 0.0002054420704252713, + "loss": 1.6462, + "step": 4768 + }, + { + "epoch": 3.078760490639122, + "grad_norm": 1.2105338546939854, + "learning_rate": 0.00020538203397985403, + "loss": 1.3938, + "step": 4769 + }, + { + "epoch": 3.079406068431246, + "grad_norm": 1.373606214205432, + "learning_rate": 0.0002053219872604944, + "loss": 1.7439, + "step": 4770 + }, + { + "epoch": 3.08005164622337, + "grad_norm": 1.190702479381533, + "learning_rate": 0.00020526193027833173, + "loss": 1.2078, + "step": 4771 + }, + { + "epoch": 3.080697224015494, + "grad_norm": 1.3474477764391837, + "learning_rate": 0.0002052018630445073, + "loss": 1.6333, + "step": 4772 + }, + { + "epoch": 3.081342801807618, + "grad_norm": 1.3855058594461311, + "learning_rate": 0.0002051417855701641, + "loss": 1.6945, + "step": 4773 + }, + { + "epoch": 3.081988379599742, + "grad_norm": 1.2869023436460474, + "learning_rate": 0.00020508169786644714, + "loss": 1.6327, + "step": 4774 + }, + { + "epoch": 3.0826339573918657, + "grad_norm": 1.3345564872689304, + "learning_rate": 0.0002050215999445034, + "loss": 1.7976, + "step": 4775 + }, + { + "epoch": 3.0832795351839897, + "grad_norm": 1.272664346882192, + "learning_rate": 0.0002049614918154816, + "loss": 1.5861, + "step": 4776 + }, + { + "epoch": 3.0839251129761136, + "grad_norm": 1.3159368239552423, + "learning_rate": 0.00020490137349053243, + "loss": 1.6058, + "step": 4777 + }, + { + "epoch": 3.0845706907682375, + "grad_norm": 1.3512312293872697, + "learning_rate": 0.00020484124498080844, + "loss": 1.6782, + "step": 4778 + }, + { + "epoch": 3.0852162685603615, + "grad_norm": 1.3587400508440386, + "learning_rate": 0.00020478110629746417, + "loss": 1.6752, + "step": 4779 + }, + { + "epoch": 3.0858618463524854, + "grad_norm": 1.4998973244454212, + "learning_rate": 0.00020472095745165591, + "loss": 1.6074, + "step": 4780 + }, + { + "epoch": 3.0865074241446093, + "grad_norm": 1.3663201695956362, + "learning_rate": 0.0002046607984545419, + "loss": 1.5573, + "step": 4781 + }, + { + "epoch": 3.0871530019367333, + "grad_norm": 1.3595232797074468, + "learning_rate": 0.00020460062931728225, + "loss": 1.7303, + "step": 4782 + }, + { + "epoch": 3.087798579728857, + "grad_norm": 1.2844021910880519, + "learning_rate": 0.00020454045005103901, + "loss": 1.5732, + "step": 4783 + }, + { + "epoch": 3.088444157520981, + "grad_norm": 1.257852233115912, + "learning_rate": 0.00020448026066697598, + "loss": 1.5505, + "step": 4784 + }, + { + "epoch": 3.089089735313105, + "grad_norm": 1.4138814371578394, + "learning_rate": 0.000204420061176259, + "loss": 1.5846, + "step": 4785 + }, + { + "epoch": 3.089735313105229, + "grad_norm": 1.4637290967682248, + "learning_rate": 0.0002043598515900556, + "loss": 1.8268, + "step": 4786 + }, + { + "epoch": 3.090380890897353, + "grad_norm": 1.290218846697018, + "learning_rate": 0.00020429963191953543, + "loss": 1.3165, + "step": 4787 + }, + { + "epoch": 3.0910264686894773, + "grad_norm": 1.2176752383959546, + "learning_rate": 0.00020423940217586967, 
+ "loss": 1.5015, + "step": 4788 + }, + { + "epoch": 3.091672046481601, + "grad_norm": 1.3393194535124024, + "learning_rate": 0.00020417916237023173, + "loss": 1.577, + "step": 4789 + }, + { + "epoch": 3.092317624273725, + "grad_norm": 1.3900850148332045, + "learning_rate": 0.00020411891251379666, + "loss": 1.5187, + "step": 4790 + }, + { + "epoch": 3.092963202065849, + "grad_norm": 1.3494213424145551, + "learning_rate": 0.00020405865261774147, + "loss": 1.5349, + "step": 4791 + }, + { + "epoch": 3.093608779857973, + "grad_norm": 1.4223935383993471, + "learning_rate": 0.0002039983826932449, + "loss": 1.699, + "step": 4792 + }, + { + "epoch": 3.094254357650097, + "grad_norm": 1.4123690240494258, + "learning_rate": 0.00020393810275148768, + "loss": 1.7201, + "step": 4793 + }, + { + "epoch": 3.094899935442221, + "grad_norm": 1.3481864649005701, + "learning_rate": 0.00020387781280365247, + "loss": 1.4394, + "step": 4794 + }, + { + "epoch": 3.095545513234345, + "grad_norm": 1.177844000596365, + "learning_rate": 0.00020381751286092356, + "loss": 1.2766, + "step": 4795 + }, + { + "epoch": 3.0961910910264687, + "grad_norm": 1.3805379084049278, + "learning_rate": 0.00020375720293448724, + "loss": 1.645, + "step": 4796 + }, + { + "epoch": 3.0968366688185927, + "grad_norm": 1.4558420647883328, + "learning_rate": 0.00020369688303553172, + "loss": 1.7401, + "step": 4797 + }, + { + "epoch": 3.0974822466107166, + "grad_norm": 1.3710402619868796, + "learning_rate": 0.00020363655317524686, + "loss": 1.5224, + "step": 4798 + }, + { + "epoch": 3.0981278244028405, + "grad_norm": 1.23440500842748, + "learning_rate": 0.00020357621336482447, + "loss": 1.3554, + "step": 4799 + }, + { + "epoch": 3.0987734021949644, + "grad_norm": 1.436358425611383, + "learning_rate": 0.00020351586361545826, + "loss": 1.6097, + "step": 4800 + }, + { + "epoch": 3.0994189799870884, + "grad_norm": 1.6152593685722618, + "learning_rate": 0.00020345550393834378, + "loss": 1.9592, + "step": 4801 + }, + { + "epoch": 3.1000645577792123, + "grad_norm": 1.3280694093690861, + "learning_rate": 0.00020339513434467824, + "loss": 1.6995, + "step": 4802 + }, + { + "epoch": 3.1007101355713362, + "grad_norm": 1.336879715400209, + "learning_rate": 0.00020333475484566094, + "loss": 1.5672, + "step": 4803 + }, + { + "epoch": 3.10135571336346, + "grad_norm": 1.2009653120146668, + "learning_rate": 0.00020327436545249287, + "loss": 1.4366, + "step": 4804 + }, + { + "epoch": 3.102001291155584, + "grad_norm": 1.3020880707519296, + "learning_rate": 0.0002032139661763769, + "loss": 1.8509, + "step": 4805 + }, + { + "epoch": 3.102646868947708, + "grad_norm": 1.1545235353701453, + "learning_rate": 0.0002031535570285177, + "loss": 1.423, + "step": 4806 + }, + { + "epoch": 3.103292446739832, + "grad_norm": 1.2005340354603395, + "learning_rate": 0.0002030931380201218, + "loss": 1.6135, + "step": 4807 + }, + { + "epoch": 3.103938024531956, + "grad_norm": 1.166582544226183, + "learning_rate": 0.00020303270916239763, + "loss": 1.3837, + "step": 4808 + }, + { + "epoch": 3.1045836023240803, + "grad_norm": 1.2685040755529513, + "learning_rate": 0.00020297227046655523, + "loss": 1.5945, + "step": 4809 + }, + { + "epoch": 3.105229180116204, + "grad_norm": 1.5023429971434232, + "learning_rate": 0.0002029118219438067, + "loss": 1.861, + "step": 4810 + }, + { + "epoch": 3.105874757908328, + "grad_norm": 1.2703619251108804, + "learning_rate": 0.00020285136360536585, + "loss": 1.4792, + "step": 4811 + }, + { + "epoch": 3.106520335700452, + "grad_norm": 1.46952899946235, + 
"learning_rate": 0.00020279089546244836, + "loss": 1.9528, + "step": 4812 + }, + { + "epoch": 3.107165913492576, + "grad_norm": 1.4679822636480386, + "learning_rate": 0.0002027304175262716, + "loss": 1.8553, + "step": 4813 + }, + { + "epoch": 3.1078114912847, + "grad_norm": 1.3646425573993546, + "learning_rate": 0.00020266992980805495, + "loss": 1.7231, + "step": 4814 + }, + { + "epoch": 3.108457069076824, + "grad_norm": 1.3128245354022043, + "learning_rate": 0.0002026094323190195, + "loss": 1.7852, + "step": 4815 + }, + { + "epoch": 3.1091026468689478, + "grad_norm": 1.2467462212959002, + "learning_rate": 0.0002025489250703881, + "loss": 1.5635, + "step": 4816 + }, + { + "epoch": 3.1097482246610717, + "grad_norm": 1.4585097881883289, + "learning_rate": 0.00020248840807338554, + "loss": 1.827, + "step": 4817 + }, + { + "epoch": 3.1103938024531956, + "grad_norm": 1.3504201147328643, + "learning_rate": 0.00020242788133923828, + "loss": 1.5715, + "step": 4818 + }, + { + "epoch": 3.1110393802453196, + "grad_norm": 1.3444883959147231, + "learning_rate": 0.0002023673448791748, + "loss": 1.5487, + "step": 4819 + }, + { + "epoch": 3.1116849580374435, + "grad_norm": 1.200502610710635, + "learning_rate": 0.00020230679870442505, + "loss": 1.2867, + "step": 4820 + }, + { + "epoch": 3.1123305358295674, + "grad_norm": 1.3862013059199898, + "learning_rate": 0.00020224624282622107, + "loss": 1.8285, + "step": 4821 + }, + { + "epoch": 3.1129761136216914, + "grad_norm": 1.2345261140624646, + "learning_rate": 0.00020218567725579665, + "loss": 1.3785, + "step": 4822 + }, + { + "epoch": 3.1136216914138153, + "grad_norm": 1.3172472452122614, + "learning_rate": 0.0002021251020043873, + "loss": 1.7436, + "step": 4823 + }, + { + "epoch": 3.114267269205939, + "grad_norm": 1.17630638875474, + "learning_rate": 0.00020206451708323027, + "loss": 1.3224, + "step": 4824 + }, + { + "epoch": 3.114912846998063, + "grad_norm": 1.5373240813904663, + "learning_rate": 0.0002020039225035648, + "loss": 1.8092, + "step": 4825 + }, + { + "epoch": 3.115558424790187, + "grad_norm": 1.4419628991159938, + "learning_rate": 0.00020194331827663186, + "loss": 1.7802, + "step": 4826 + }, + { + "epoch": 3.116204002582311, + "grad_norm": 1.4555679336712881, + "learning_rate": 0.00020188270441367397, + "loss": 1.7975, + "step": 4827 + }, + { + "epoch": 3.116849580374435, + "grad_norm": 1.2232270850550766, + "learning_rate": 0.00020182208092593576, + "loss": 1.4866, + "step": 4828 + }, + { + "epoch": 3.117495158166559, + "grad_norm": 1.38509518242082, + "learning_rate": 0.00020176144782466354, + "loss": 1.7636, + "step": 4829 + }, + { + "epoch": 3.118140735958683, + "grad_norm": 1.2856724167994236, + "learning_rate": 0.00020170080512110533, + "loss": 1.6485, + "step": 4830 + }, + { + "epoch": 3.118786313750807, + "grad_norm": 1.3810883637312816, + "learning_rate": 0.00020164015282651097, + "loss": 1.8706, + "step": 4831 + }, + { + "epoch": 3.119431891542931, + "grad_norm": 1.3105968581739718, + "learning_rate": 0.0002015794909521321, + "loss": 1.5939, + "step": 4832 + }, + { + "epoch": 3.120077469335055, + "grad_norm": 1.2782667063073765, + "learning_rate": 0.00020151881950922218, + "loss": 1.47, + "step": 4833 + }, + { + "epoch": 3.120723047127179, + "grad_norm": 1.2266630609634528, + "learning_rate": 0.00020145813850903633, + "loss": 1.4783, + "step": 4834 + }, + { + "epoch": 3.121368624919303, + "grad_norm": 1.2909654848352494, + "learning_rate": 0.00020139744796283148, + "loss": 1.6643, + "step": 4835 + }, + { + "epoch": 3.122014202711427, 
+ "grad_norm": 1.230425270243385, + "learning_rate": 0.00020133674788186647, + "loss": 1.6698, + "step": 4836 + }, + { + "epoch": 3.1226597805035508, + "grad_norm": 1.2579537237175908, + "learning_rate": 0.00020127603827740174, + "loss": 1.5383, + "step": 4837 + }, + { + "epoch": 3.1233053582956747, + "grad_norm": 1.3928284765964525, + "learning_rate": 0.00020121531916069947, + "loss": 1.7671, + "step": 4838 + }, + { + "epoch": 3.1239509360877986, + "grad_norm": 1.3833930688092022, + "learning_rate": 0.0002011545905430238, + "loss": 1.7579, + "step": 4839 + }, + { + "epoch": 3.1245965138799225, + "grad_norm": 1.2818899286158059, + "learning_rate": 0.00020109385243564045, + "loss": 1.7442, + "step": 4840 + }, + { + "epoch": 3.1252420916720465, + "grad_norm": 1.3368162576688203, + "learning_rate": 0.00020103310484981705, + "loss": 1.7197, + "step": 4841 + }, + { + "epoch": 3.1258876694641704, + "grad_norm": 1.2881968091166722, + "learning_rate": 0.00020097234779682278, + "loss": 1.6153, + "step": 4842 + }, + { + "epoch": 3.1265332472562943, + "grad_norm": 1.324456972175078, + "learning_rate": 0.0002009115812879288, + "loss": 1.4269, + "step": 4843 + }, + { + "epoch": 3.1271788250484183, + "grad_norm": 1.2383347689337763, + "learning_rate": 0.00020085080533440794, + "loss": 1.4451, + "step": 4844 + }, + { + "epoch": 3.127824402840542, + "grad_norm": 1.562582927609635, + "learning_rate": 0.0002007900199475347, + "loss": 1.7653, + "step": 4845 + }, + { + "epoch": 3.128469980632666, + "grad_norm": 1.2968472826499868, + "learning_rate": 0.00020072922513858542, + "loss": 1.6805, + "step": 4846 + }, + { + "epoch": 3.12911555842479, + "grad_norm": 1.350557915752751, + "learning_rate": 0.00020066842091883823, + "loss": 1.6832, + "step": 4847 + }, + { + "epoch": 3.129761136216914, + "grad_norm": 1.5891847359707028, + "learning_rate": 0.0002006076072995729, + "loss": 1.8032, + "step": 4848 + }, + { + "epoch": 3.130406714009038, + "grad_norm": 1.2156044993567625, + "learning_rate": 0.00020054678429207094, + "loss": 1.3536, + "step": 4849 + }, + { + "epoch": 3.131052291801162, + "grad_norm": 1.4415911599424849, + "learning_rate": 0.00020048595190761572, + "loss": 1.7727, + "step": 4850 + }, + { + "epoch": 3.131697869593286, + "grad_norm": 1.3406525113688477, + "learning_rate": 0.00020042511015749232, + "loss": 1.4642, + "step": 4851 + }, + { + "epoch": 3.13234344738541, + "grad_norm": 1.4438236928745805, + "learning_rate": 0.00020036425905298736, + "loss": 1.8116, + "step": 4852 + }, + { + "epoch": 3.132989025177534, + "grad_norm": 1.405275422083178, + "learning_rate": 0.00020030339860538946, + "loss": 1.8061, + "step": 4853 + }, + { + "epoch": 3.133634602969658, + "grad_norm": 1.3491326542592652, + "learning_rate": 0.00020024252882598886, + "loss": 1.668, + "step": 4854 + }, + { + "epoch": 3.134280180761782, + "grad_norm": 1.2839334662374913, + "learning_rate": 0.00020018164972607753, + "loss": 1.7094, + "step": 4855 + }, + { + "epoch": 3.134925758553906, + "grad_norm": 1.196909288983767, + "learning_rate": 0.00020012076131694913, + "loss": 1.3783, + "step": 4856 + }, + { + "epoch": 3.13557133634603, + "grad_norm": 1.4351897462723258, + "learning_rate": 0.00020005986360989916, + "loss": 1.781, + "step": 4857 + }, + { + "epoch": 3.1362169141381537, + "grad_norm": 1.363106794806833, + "learning_rate": 0.00019999895661622477, + "loss": 1.8386, + "step": 4858 + }, + { + "epoch": 3.1368624919302777, + "grad_norm": 1.3891217433506302, + "learning_rate": 0.00019993804034722478, + "loss": 1.6753, + "step": 4859 
+ }, + { + "epoch": 3.1375080697224016, + "grad_norm": 1.5583860642896348, + "learning_rate": 0.0001998771148141998, + "loss": 1.9199, + "step": 4860 + }, + { + "epoch": 3.1381536475145255, + "grad_norm": 1.396197435859823, + "learning_rate": 0.00019981618002845226, + "loss": 1.7563, + "step": 4861 + }, + { + "epoch": 3.1387992253066495, + "grad_norm": 1.2591765218818567, + "learning_rate": 0.0001997552360012861, + "loss": 1.5472, + "step": 4862 + }, + { + "epoch": 3.1394448030987734, + "grad_norm": 1.260207814514171, + "learning_rate": 0.00019969428274400702, + "loss": 1.4335, + "step": 4863 + }, + { + "epoch": 3.1400903808908973, + "grad_norm": 1.3569550374627561, + "learning_rate": 0.00019963332026792254, + "loss": 1.8009, + "step": 4864 + }, + { + "epoch": 3.1407359586830212, + "grad_norm": 1.3595372761853863, + "learning_rate": 0.00019957234858434189, + "loss": 1.6649, + "step": 4865 + }, + { + "epoch": 3.141381536475145, + "grad_norm": 1.3498185279903239, + "learning_rate": 0.0001995113677045759, + "loss": 1.5943, + "step": 4866 + }, + { + "epoch": 3.142027114267269, + "grad_norm": 1.1877563671897158, + "learning_rate": 0.0001994503776399371, + "loss": 1.5305, + "step": 4867 + }, + { + "epoch": 3.142672692059393, + "grad_norm": 1.3753180024976657, + "learning_rate": 0.00019938937840173983, + "loss": 1.8174, + "step": 4868 + }, + { + "epoch": 3.143318269851517, + "grad_norm": 1.3351918212625549, + "learning_rate": 0.00019932837000130016, + "loss": 1.5517, + "step": 4869 + }, + { + "epoch": 3.143963847643641, + "grad_norm": 1.3477804329214316, + "learning_rate": 0.00019926735244993568, + "loss": 1.6258, + "step": 4870 + }, + { + "epoch": 3.144609425435765, + "grad_norm": 1.1429897808410594, + "learning_rate": 0.00019920632575896573, + "loss": 1.2863, + "step": 4871 + }, + { + "epoch": 3.1452550032278888, + "grad_norm": 1.3030103903296564, + "learning_rate": 0.00019914528993971153, + "loss": 1.6832, + "step": 4872 + }, + { + "epoch": 3.1459005810200127, + "grad_norm": 1.4720180448066595, + "learning_rate": 0.00019908424500349582, + "loss": 1.8938, + "step": 4873 + }, + { + "epoch": 3.146546158812137, + "grad_norm": 1.5159513873770323, + "learning_rate": 0.00019902319096164303, + "loss": 1.8786, + "step": 4874 + }, + { + "epoch": 3.147191736604261, + "grad_norm": 1.7026899429017106, + "learning_rate": 0.00019896212782547932, + "loss": 1.6542, + "step": 4875 + }, + { + "epoch": 3.147837314396385, + "grad_norm": 1.4782078633413065, + "learning_rate": 0.00019890105560633258, + "loss": 1.7077, + "step": 4876 + }, + { + "epoch": 3.148482892188509, + "grad_norm": 1.316451098478764, + "learning_rate": 0.00019883997431553228, + "loss": 1.8155, + "step": 4877 + }, + { + "epoch": 3.149128469980633, + "grad_norm": 1.4328802652307528, + "learning_rate": 0.00019877888396440969, + "loss": 1.7948, + "step": 4878 + }, + { + "epoch": 3.1497740477727567, + "grad_norm": 1.273914748409915, + "learning_rate": 0.0001987177845642977, + "loss": 1.5949, + "step": 4879 + }, + { + "epoch": 3.1504196255648806, + "grad_norm": 1.3037260302892273, + "learning_rate": 0.0001986566761265308, + "loss": 1.686, + "step": 4880 + }, + { + "epoch": 3.1510652033570046, + "grad_norm": 1.3414813426131027, + "learning_rate": 0.00019859555866244536, + "loss": 1.7995, + "step": 4881 + }, + { + "epoch": 3.1517107811491285, + "grad_norm": 1.3204350113521741, + "learning_rate": 0.00019853443218337922, + "loss": 1.6623, + "step": 4882 + }, + { + "epoch": 3.1523563589412524, + "grad_norm": 1.2654553004037445, + "learning_rate": 
0.00019847329670067207, + "loss": 1.5324, + "step": 4883 + }, + { + "epoch": 3.1530019367333764, + "grad_norm": 1.299731469160099, + "learning_rate": 0.00019841215222566504, + "loss": 1.7612, + "step": 4884 + }, + { + "epoch": 3.1536475145255003, + "grad_norm": 1.3574186110972435, + "learning_rate": 0.00019835099876970114, + "loss": 1.569, + "step": 4885 + }, + { + "epoch": 3.1542930923176242, + "grad_norm": 1.536742379524428, + "learning_rate": 0.00019828983634412496, + "loss": 1.781, + "step": 4886 + }, + { + "epoch": 3.154938670109748, + "grad_norm": 1.3667490842059342, + "learning_rate": 0.00019822866496028283, + "loss": 1.9591, + "step": 4887 + }, + { + "epoch": 3.155584247901872, + "grad_norm": 1.3887716526362253, + "learning_rate": 0.00019816748462952256, + "loss": 1.6464, + "step": 4888 + }, + { + "epoch": 3.156229825693996, + "grad_norm": 1.2495938285820274, + "learning_rate": 0.0001981062953631938, + "loss": 1.4542, + "step": 4889 + }, + { + "epoch": 3.15687540348612, + "grad_norm": 1.4970853478714117, + "learning_rate": 0.00019804509717264785, + "loss": 1.8639, + "step": 4890 + }, + { + "epoch": 3.157520981278244, + "grad_norm": 1.4148501961642708, + "learning_rate": 0.0001979838900692375, + "loss": 1.641, + "step": 4891 + }, + { + "epoch": 3.158166559070368, + "grad_norm": 1.2689607456770007, + "learning_rate": 0.00019792267406431734, + "loss": 1.4033, + "step": 4892 + }, + { + "epoch": 3.1588121368624917, + "grad_norm": 1.4405315769331075, + "learning_rate": 0.00019786144916924362, + "loss": 1.6407, + "step": 4893 + }, + { + "epoch": 3.159457714654616, + "grad_norm": 1.4752475716510387, + "learning_rate": 0.00019780021539537418, + "loss": 1.7932, + "step": 4894 + }, + { + "epoch": 3.16010329244674, + "grad_norm": 1.447793363008878, + "learning_rate": 0.00019773897275406847, + "loss": 1.7204, + "step": 4895 + }, + { + "epoch": 3.160748870238864, + "grad_norm": 1.3845102382778858, + "learning_rate": 0.00019767772125668762, + "loss": 1.7029, + "step": 4896 + }, + { + "epoch": 3.161394448030988, + "grad_norm": 1.4535969507199387, + "learning_rate": 0.00019761646091459454, + "loss": 1.9352, + "step": 4897 + }, + { + "epoch": 3.162040025823112, + "grad_norm": 1.3219145441755207, + "learning_rate": 0.0001975551917391536, + "loss": 1.7723, + "step": 4898 + }, + { + "epoch": 3.1626856036152358, + "grad_norm": 1.2985210641019949, + "learning_rate": 0.00019749391374173077, + "loss": 1.6467, + "step": 4899 + }, + { + "epoch": 3.1633311814073597, + "grad_norm": 1.3644837316677048, + "learning_rate": 0.00019743262693369389, + "loss": 1.6908, + "step": 4900 + }, + { + "epoch": 3.1639767591994836, + "grad_norm": 1.2593249649855067, + "learning_rate": 0.00019737133132641225, + "loss": 1.5548, + "step": 4901 + }, + { + "epoch": 3.1646223369916076, + "grad_norm": 1.3451811334824808, + "learning_rate": 0.0001973100269312568, + "loss": 1.6863, + "step": 4902 + }, + { + "epoch": 3.1652679147837315, + "grad_norm": 1.43930502585238, + "learning_rate": 0.0001972487137596002, + "loss": 1.8053, + "step": 4903 + }, + { + "epoch": 3.1659134925758554, + "grad_norm": 1.3031742366690575, + "learning_rate": 0.00019718739182281663, + "loss": 1.6331, + "step": 4904 + }, + { + "epoch": 3.1665590703679793, + "grad_norm": 1.302991842970838, + "learning_rate": 0.00019712606113228193, + "loss": 1.5862, + "step": 4905 + }, + { + "epoch": 3.1672046481601033, + "grad_norm": 1.3482494445253173, + "learning_rate": 0.00019706472169937363, + "loss": 1.7115, + "step": 4906 + }, + { + "epoch": 3.167850225952227, + 
"grad_norm": 1.2700738220536973, + "learning_rate": 0.00019700337353547074, + "loss": 1.4895, + "step": 4907 + }, + { + "epoch": 3.168495803744351, + "grad_norm": 1.2852176783447646, + "learning_rate": 0.00019694201665195415, + "loss": 1.6381, + "step": 4908 + }, + { + "epoch": 3.169141381536475, + "grad_norm": 1.503319208116009, + "learning_rate": 0.00019688065106020607, + "loss": 1.8588, + "step": 4909 + }, + { + "epoch": 3.169786959328599, + "grad_norm": 1.4596220997190064, + "learning_rate": 0.00019681927677161045, + "loss": 1.7129, + "step": 4910 + }, + { + "epoch": 3.170432537120723, + "grad_norm": 1.2735255639225227, + "learning_rate": 0.00019675789379755288, + "loss": 1.4658, + "step": 4911 + }, + { + "epoch": 3.171078114912847, + "grad_norm": 1.5610059103087983, + "learning_rate": 0.00019669650214942063, + "loss": 1.9479, + "step": 4912 + }, + { + "epoch": 3.171723692704971, + "grad_norm": 1.4595069694369633, + "learning_rate": 0.00019663510183860232, + "loss": 1.7846, + "step": 4913 + }, + { + "epoch": 3.1723692704970947, + "grad_norm": 1.3228840758279428, + "learning_rate": 0.0001965736928764885, + "loss": 1.7388, + "step": 4914 + }, + { + "epoch": 3.1730148482892186, + "grad_norm": 1.3351147881997623, + "learning_rate": 0.00019651227527447106, + "loss": 1.8183, + "step": 4915 + }, + { + "epoch": 3.1736604260813426, + "grad_norm": 1.2983719368708553, + "learning_rate": 0.0001964508490439437, + "loss": 1.7268, + "step": 4916 + }, + { + "epoch": 3.174306003873467, + "grad_norm": 1.143925768391522, + "learning_rate": 0.0001963894141963015, + "loss": 1.3464, + "step": 4917 + }, + { + "epoch": 3.174951581665591, + "grad_norm": 1.6090779665697152, + "learning_rate": 0.00019632797074294132, + "loss": 1.9187, + "step": 4918 + }, + { + "epoch": 3.175597159457715, + "grad_norm": 1.527295519537049, + "learning_rate": 0.00019626651869526166, + "loss": 1.8311, + "step": 4919 + }, + { + "epoch": 3.1762427372498387, + "grad_norm": 1.489350219089984, + "learning_rate": 0.00019620505806466237, + "loss": 1.727, + "step": 4920 + }, + { + "epoch": 3.1768883150419627, + "grad_norm": 1.3878592816903674, + "learning_rate": 0.00019614358886254507, + "loss": 1.7289, + "step": 4921 + }, + { + "epoch": 3.1775338928340866, + "grad_norm": 1.4110793987820243, + "learning_rate": 0.000196082111100313, + "loss": 1.8044, + "step": 4922 + }, + { + "epoch": 3.1781794706262105, + "grad_norm": 1.4550199891841333, + "learning_rate": 0.00019602062478937089, + "loss": 1.6673, + "step": 4923 + }, + { + "epoch": 3.1788250484183345, + "grad_norm": 1.3355041814138728, + "learning_rate": 0.000195959129941125, + "loss": 1.7112, + "step": 4924 + }, + { + "epoch": 3.1794706262104584, + "grad_norm": 1.4310689266924792, + "learning_rate": 0.00019589762656698338, + "loss": 1.8072, + "step": 4925 + }, + { + "epoch": 3.1801162040025823, + "grad_norm": 1.4117087363101972, + "learning_rate": 0.0001958361146783555, + "loss": 1.7092, + "step": 4926 + }, + { + "epoch": 3.1807617817947063, + "grad_norm": 1.377550863539276, + "learning_rate": 0.00019577459428665245, + "loss": 1.5474, + "step": 4927 + }, + { + "epoch": 3.18140735958683, + "grad_norm": 1.3567487123022395, + "learning_rate": 0.00019571306540328695, + "loss": 1.6876, + "step": 4928 + }, + { + "epoch": 3.182052937378954, + "grad_norm": 1.3028061490282712, + "learning_rate": 0.00019565152803967313, + "loss": 1.6361, + "step": 4929 + }, + { + "epoch": 3.182698515171078, + "grad_norm": 1.221540740069301, + "learning_rate": 0.00019558998220722696, + "loss": 1.5751, + "step": 4930 
+ }, + { + "epoch": 3.183344092963202, + "grad_norm": 1.3777289515966586, + "learning_rate": 0.00019552842791736573, + "loss": 1.5714, + "step": 4931 + }, + { + "epoch": 3.183989670755326, + "grad_norm": 1.261523930737631, + "learning_rate": 0.00019546686518150837, + "loss": 1.4313, + "step": 4932 + }, + { + "epoch": 3.18463524854745, + "grad_norm": 1.41608929764485, + "learning_rate": 0.00019540529401107555, + "loss": 1.7635, + "step": 4933 + }, + { + "epoch": 3.1852808263395738, + "grad_norm": 1.4355312221774943, + "learning_rate": 0.00019534371441748923, + "loss": 1.5494, + "step": 4934 + }, + { + "epoch": 3.1859264041316977, + "grad_norm": 1.4947859034179507, + "learning_rate": 0.0001952821264121731, + "loss": 1.7722, + "step": 4935 + }, + { + "epoch": 3.1865719819238216, + "grad_norm": 1.2929892835775247, + "learning_rate": 0.0001952205300065524, + "loss": 1.4969, + "step": 4936 + }, + { + "epoch": 3.187217559715946, + "grad_norm": 1.342071378899318, + "learning_rate": 0.0001951589252120539, + "loss": 1.7131, + "step": 4937 + }, + { + "epoch": 3.18786313750807, + "grad_norm": 1.304289826128987, + "learning_rate": 0.0001950973120401059, + "loss": 1.6795, + "step": 4938 + }, + { + "epoch": 3.188508715300194, + "grad_norm": 1.348972293786404, + "learning_rate": 0.00019503569050213825, + "loss": 1.7126, + "step": 4939 + }, + { + "epoch": 3.189154293092318, + "grad_norm": 1.354187528901284, + "learning_rate": 0.00019497406060958248, + "loss": 1.8265, + "step": 4940 + }, + { + "epoch": 3.1897998708844417, + "grad_norm": 1.3205835393108838, + "learning_rate": 0.00019491242237387152, + "loss": 1.5269, + "step": 4941 + }, + { + "epoch": 3.1904454486765657, + "grad_norm": 1.4049347801169836, + "learning_rate": 0.00019485077580643992, + "loss": 1.7931, + "step": 4942 + }, + { + "epoch": 3.1910910264686896, + "grad_norm": 1.247833924158416, + "learning_rate": 0.00019478912091872372, + "loss": 1.5716, + "step": 4943 + }, + { + "epoch": 3.1917366042608135, + "grad_norm": 1.2569369479718357, + "learning_rate": 0.0001947274577221606, + "loss": 1.6314, + "step": 4944 + }, + { + "epoch": 3.1923821820529374, + "grad_norm": 1.4811478136147591, + "learning_rate": 0.0001946657862281897, + "loss": 1.6719, + "step": 4945 + }, + { + "epoch": 3.1930277598450614, + "grad_norm": 1.4797261705966216, + "learning_rate": 0.00019460410644825163, + "loss": 1.7381, + "step": 4946 + }, + { + "epoch": 3.1936733376371853, + "grad_norm": 1.3854644494316695, + "learning_rate": 0.0001945424183937888, + "loss": 1.523, + "step": 4947 + }, + { + "epoch": 3.1943189154293092, + "grad_norm": 1.3444997841472506, + "learning_rate": 0.00019448072207624496, + "loss": 1.6603, + "step": 4948 + }, + { + "epoch": 3.194964493221433, + "grad_norm": 1.388391081718362, + "learning_rate": 0.0001944190175070652, + "loss": 1.6137, + "step": 4949 + }, + { + "epoch": 3.195610071013557, + "grad_norm": 1.3893700018023336, + "learning_rate": 0.0001943573046976966, + "loss": 1.5244, + "step": 4950 + }, + { + "epoch": 3.196255648805681, + "grad_norm": 1.4949769037078178, + "learning_rate": 0.00019429558365958744, + "loss": 1.7908, + "step": 4951 + }, + { + "epoch": 3.196901226597805, + "grad_norm": 1.4276641367691176, + "learning_rate": 0.0001942338544041876, + "loss": 1.483, + "step": 4952 + }, + { + "epoch": 3.197546804389929, + "grad_norm": 1.3752435549751625, + "learning_rate": 0.0001941721169429485, + "loss": 1.7449, + "step": 4953 + }, + { + "epoch": 3.198192382182053, + "grad_norm": 1.4547719411231503, + "learning_rate": 0.00019411037128732315, 
+ "loss": 1.8296, + "step": 4954 + }, + { + "epoch": 3.1988379599741767, + "grad_norm": 1.5907039686794713, + "learning_rate": 0.00019404861744876594, + "loss": 1.829, + "step": 4955 + }, + { + "epoch": 3.1994835377663007, + "grad_norm": 1.649774325379199, + "learning_rate": 0.0001939868554387328, + "loss": 1.8155, + "step": 4956 + }, + { + "epoch": 3.2001291155584246, + "grad_norm": 1.437640914637895, + "learning_rate": 0.00019392508526868131, + "loss": 1.8838, + "step": 4957 + }, + { + "epoch": 3.2007746933505485, + "grad_norm": 1.3908304800608462, + "learning_rate": 0.00019386330695007046, + "loss": 1.8556, + "step": 4958 + }, + { + "epoch": 3.2014202711426725, + "grad_norm": 1.228206941684653, + "learning_rate": 0.00019380152049436084, + "loss": 1.4863, + "step": 4959 + }, + { + "epoch": 3.202065848934797, + "grad_norm": 1.307928925162032, + "learning_rate": 0.00019373972591301428, + "loss": 1.6543, + "step": 4960 + }, + { + "epoch": 3.2027114267269208, + "grad_norm": 1.4176392218494298, + "learning_rate": 0.00019367792321749451, + "loss": 1.6562, + "step": 4961 + }, + { + "epoch": 3.2033570045190447, + "grad_norm": 1.4629592539450227, + "learning_rate": 0.00019361611241926652, + "loss": 1.8646, + "step": 4962 + }, + { + "epoch": 3.2040025823111686, + "grad_norm": 1.4891141032561863, + "learning_rate": 0.00019355429352979677, + "loss": 1.8546, + "step": 4963 + }, + { + "epoch": 3.2046481601032926, + "grad_norm": 1.3425876313274576, + "learning_rate": 0.0001934924665605534, + "loss": 1.9895, + "step": 4964 + }, + { + "epoch": 3.2052937378954165, + "grad_norm": 1.2594481131463724, + "learning_rate": 0.00019343063152300592, + "loss": 1.5079, + "step": 4965 + }, + { + "epoch": 3.2059393156875404, + "grad_norm": 1.1024568017305993, + "learning_rate": 0.0001933687884286254, + "loss": 1.258, + "step": 4966 + }, + { + "epoch": 3.2065848934796644, + "grad_norm": 1.3506453483397127, + "learning_rate": 0.00019330693728888433, + "loss": 1.579, + "step": 4967 + }, + { + "epoch": 3.2072304712717883, + "grad_norm": 1.574993892813194, + "learning_rate": 0.0001932450781152567, + "loss": 1.5465, + "step": 4968 + }, + { + "epoch": 3.207876049063912, + "grad_norm": 1.4742388296308067, + "learning_rate": 0.00019318321091921824, + "loss": 1.7685, + "step": 4969 + }, + { + "epoch": 3.208521626856036, + "grad_norm": 1.313959587491757, + "learning_rate": 0.00019312133571224572, + "loss": 1.5586, + "step": 4970 + }, + { + "epoch": 3.20916720464816, + "grad_norm": 1.643387916581867, + "learning_rate": 0.00019305945250581768, + "loss": 1.8039, + "step": 4971 + }, + { + "epoch": 3.209812782440284, + "grad_norm": 1.385373885846747, + "learning_rate": 0.00019299756131141421, + "loss": 1.4712, + "step": 4972 + }, + { + "epoch": 3.210458360232408, + "grad_norm": 1.5620808216039046, + "learning_rate": 0.0001929356621405167, + "loss": 1.5227, + "step": 4973 + }, + { + "epoch": 3.211103938024532, + "grad_norm": 1.376106196888487, + "learning_rate": 0.00019287375500460802, + "loss": 1.4957, + "step": 4974 + }, + { + "epoch": 3.211749515816656, + "grad_norm": 1.390547120462553, + "learning_rate": 0.00019281183991517268, + "loss": 1.4849, + "step": 4975 + }, + { + "epoch": 3.2123950936087797, + "grad_norm": 1.511657786901615, + "learning_rate": 0.00019274991688369655, + "loss": 1.7253, + "step": 4976 + }, + { + "epoch": 3.2130406714009037, + "grad_norm": 1.379154848758748, + "learning_rate": 0.00019268798592166704, + "loss": 1.6691, + "step": 4977 + }, + { + "epoch": 3.2136862491930276, + "grad_norm": 1.394058196164499, + 
"learning_rate": 0.0001926260470405729, + "loss": 1.6599, + "step": 4978 + }, + { + "epoch": 3.2143318269851515, + "grad_norm": 1.2234385227475184, + "learning_rate": 0.00019256410025190443, + "loss": 1.4152, + "step": 4979 + }, + { + "epoch": 3.214977404777276, + "grad_norm": 1.3411479489045692, + "learning_rate": 0.0001925021455671535, + "loss": 1.8453, + "step": 4980 + }, + { + "epoch": 3.2156229825694, + "grad_norm": 1.253353818006331, + "learning_rate": 0.00019244018299781327, + "loss": 1.5958, + "step": 4981 + }, + { + "epoch": 3.2162685603615238, + "grad_norm": 1.4769297176203946, + "learning_rate": 0.00019237821255537843, + "loss": 1.6866, + "step": 4982 + }, + { + "epoch": 3.2169141381536477, + "grad_norm": 1.2373852008060529, + "learning_rate": 0.0001923162342513452, + "loss": 1.6338, + "step": 4983 + }, + { + "epoch": 3.2175597159457716, + "grad_norm": 1.32744337174061, + "learning_rate": 0.00019225424809721122, + "loss": 1.7168, + "step": 4984 + }, + { + "epoch": 3.2182052937378955, + "grad_norm": 1.4496916386138463, + "learning_rate": 0.00019219225410447544, + "loss": 1.739, + "step": 4985 + }, + { + "epoch": 3.2188508715300195, + "grad_norm": 1.3927318290347532, + "learning_rate": 0.0001921302522846385, + "loss": 1.5863, + "step": 4986 + }, + { + "epoch": 3.2194964493221434, + "grad_norm": 1.5050883393588534, + "learning_rate": 0.0001920682426492024, + "loss": 1.6811, + "step": 4987 + }, + { + "epoch": 3.2201420271142673, + "grad_norm": 1.3609843998836237, + "learning_rate": 0.00019200622520967043, + "loss": 1.6141, + "step": 4988 + }, + { + "epoch": 3.2207876049063913, + "grad_norm": 1.4181465724580589, + "learning_rate": 0.0001919441999775476, + "loss": 1.6508, + "step": 4989 + }, + { + "epoch": 3.221433182698515, + "grad_norm": 1.5365953963393995, + "learning_rate": 0.00019188216696434016, + "loss": 1.6751, + "step": 4990 + }, + { + "epoch": 3.222078760490639, + "grad_norm": 1.3357489460219258, + "learning_rate": 0.00019182012618155594, + "loss": 1.6322, + "step": 4991 + }, + { + "epoch": 3.222724338282763, + "grad_norm": 1.3346678130512561, + "learning_rate": 0.0001917580776407041, + "loss": 1.5692, + "step": 4992 + }, + { + "epoch": 3.223369916074887, + "grad_norm": 1.19447971786181, + "learning_rate": 0.0001916960213532953, + "loss": 1.381, + "step": 4993 + }, + { + "epoch": 3.224015493867011, + "grad_norm": 1.5787671574857154, + "learning_rate": 0.0001916339573308417, + "loss": 1.8193, + "step": 4994 + }, + { + "epoch": 3.224661071659135, + "grad_norm": 1.4798559259792194, + "learning_rate": 0.0001915718855848567, + "loss": 1.6225, + "step": 4995 + }, + { + "epoch": 3.225306649451259, + "grad_norm": 1.4466100877605474, + "learning_rate": 0.00019150980612685526, + "loss": 1.7566, + "step": 4996 + }, + { + "epoch": 3.2259522272433827, + "grad_norm": 1.389895007152384, + "learning_rate": 0.00019144771896835387, + "loss": 1.6682, + "step": 4997 + }, + { + "epoch": 3.2265978050355066, + "grad_norm": 1.418450778399317, + "learning_rate": 0.0001913856241208703, + "loss": 1.5365, + "step": 4998 + }, + { + "epoch": 3.2272433828276306, + "grad_norm": 1.3287561223659867, + "learning_rate": 0.00019132352159592373, + "loss": 1.6804, + "step": 4999 + }, + { + "epoch": 3.2278889606197545, + "grad_norm": 1.3470218229199953, + "learning_rate": 0.00019126141140503487, + "loss": 1.612, + "step": 5000 + }, + { + "epoch": 3.2285345384118784, + "grad_norm": 1.3316268309066164, + "learning_rate": 0.00019119929355972584, + "loss": 1.7163, + "step": 5001 + }, + { + "epoch": 3.2291801162040024, 
+ "grad_norm": 1.2608530901113146, + "learning_rate": 0.00019113716807152008, + "loss": 1.6189, + "step": 5002 + }, + { + "epoch": 3.2298256939961267, + "grad_norm": 1.3707494294991944, + "learning_rate": 0.00019107503495194254, + "loss": 1.7027, + "step": 5003 + }, + { + "epoch": 3.2304712717882507, + "grad_norm": 1.576987978930131, + "learning_rate": 0.0001910128942125195, + "loss": 1.7861, + "step": 5004 + }, + { + "epoch": 3.2311168495803746, + "grad_norm": 1.3959656803391591, + "learning_rate": 0.00019095074586477888, + "loss": 1.8295, + "step": 5005 + }, + { + "epoch": 3.2317624273724985, + "grad_norm": 1.3403647394763738, + "learning_rate": 0.00019088858992024967, + "loss": 1.7224, + "step": 5006 + }, + { + "epoch": 3.2324080051646225, + "grad_norm": 1.4518023745964173, + "learning_rate": 0.00019082642639046252, + "loss": 1.8402, + "step": 5007 + }, + { + "epoch": 3.2330535829567464, + "grad_norm": 1.680588677801361, + "learning_rate": 0.0001907642552869494, + "loss": 1.7304, + "step": 5008 + }, + { + "epoch": 3.2336991607488703, + "grad_norm": 1.7886419500397406, + "learning_rate": 0.0001907020766212437, + "loss": 1.9825, + "step": 5009 + }, + { + "epoch": 3.2343447385409942, + "grad_norm": 1.714701340625915, + "learning_rate": 0.00019063989040488016, + "loss": 1.726, + "step": 5010 + }, + { + "epoch": 3.234990316333118, + "grad_norm": 1.4547707445985585, + "learning_rate": 0.00019057769664939503, + "loss": 1.8578, + "step": 5011 + }, + { + "epoch": 3.235635894125242, + "grad_norm": 1.3443097511848203, + "learning_rate": 0.0001905154953663259, + "loss": 1.8287, + "step": 5012 + }, + { + "epoch": 3.236281471917366, + "grad_norm": 1.3895770877382958, + "learning_rate": 0.0001904532865672117, + "loss": 1.7472, + "step": 5013 + }, + { + "epoch": 3.23692704970949, + "grad_norm": 1.3120570975382964, + "learning_rate": 0.0001903910702635929, + "loss": 1.5146, + "step": 5014 + }, + { + "epoch": 3.237572627501614, + "grad_norm": 1.400150981927146, + "learning_rate": 0.00019032884646701115, + "loss": 1.6713, + "step": 5015 + }, + { + "epoch": 3.238218205293738, + "grad_norm": 1.494623846532788, + "learning_rate": 0.00019026661518900965, + "loss": 1.8769, + "step": 5016 + }, + { + "epoch": 3.2388637830858618, + "grad_norm": 1.4456508149566898, + "learning_rate": 0.000190204376441133, + "loss": 1.6917, + "step": 5017 + }, + { + "epoch": 3.2395093608779857, + "grad_norm": 1.4428636998135072, + "learning_rate": 0.0001901421302349271, + "loss": 1.6044, + "step": 5018 + }, + { + "epoch": 3.2401549386701096, + "grad_norm": 1.2840019371829894, + "learning_rate": 0.00019007987658193926, + "loss": 1.5787, + "step": 5019 + }, + { + "epoch": 3.2408005164622335, + "grad_norm": 1.3943681007639808, + "learning_rate": 0.0001900176154937182, + "loss": 1.8048, + "step": 5020 + }, + { + "epoch": 3.2414460942543575, + "grad_norm": 1.4905007678591415, + "learning_rate": 0.00018995534698181395, + "loss": 1.7721, + "step": 5021 + }, + { + "epoch": 3.2420916720464814, + "grad_norm": 1.444525949566186, + "learning_rate": 0.00018989307105777802, + "loss": 1.6612, + "step": 5022 + }, + { + "epoch": 3.242737249838606, + "grad_norm": 1.460274560857222, + "learning_rate": 0.0001898307877331633, + "loss": 2.0197, + "step": 5023 + }, + { + "epoch": 3.2433828276307297, + "grad_norm": 1.35407535975532, + "learning_rate": 0.00018976849701952378, + "loss": 1.7123, + "step": 5024 + }, + { + "epoch": 3.2440284054228536, + "grad_norm": 1.4249610946937807, + "learning_rate": 0.00018970619892841526, + "loss": 1.782, + "step": 5025 
+ }, + { + "epoch": 3.2446739832149776, + "grad_norm": 1.2795690140218832, + "learning_rate": 0.00018964389347139458, + "loss": 1.5073, + "step": 5026 + }, + { + "epoch": 3.2453195610071015, + "grad_norm": 1.3871158684177722, + "learning_rate": 0.00018958158066002004, + "loss": 1.7786, + "step": 5027 + }, + { + "epoch": 3.2459651387992254, + "grad_norm": 1.446841706037584, + "learning_rate": 0.00018951926050585135, + "loss": 1.7026, + "step": 5028 + }, + { + "epoch": 3.2466107165913494, + "grad_norm": 1.4137034898766199, + "learning_rate": 0.0001894569330204495, + "loss": 1.7056, + "step": 5029 + }, + { + "epoch": 3.2472562943834733, + "grad_norm": 1.3530954673263116, + "learning_rate": 0.00018939459821537696, + "loss": 1.7454, + "step": 5030 + }, + { + "epoch": 3.2479018721755972, + "grad_norm": 1.324423601076757, + "learning_rate": 0.00018933225610219746, + "loss": 1.6115, + "step": 5031 + }, + { + "epoch": 3.248547449967721, + "grad_norm": 1.3608853133785352, + "learning_rate": 0.000189269906692476, + "loss": 1.6854, + "step": 5032 + }, + { + "epoch": 3.249193027759845, + "grad_norm": 1.3173407272393785, + "learning_rate": 0.00018920754999777917, + "loss": 1.6912, + "step": 5033 + }, + { + "epoch": 3.249838605551969, + "grad_norm": 1.344113623597989, + "learning_rate": 0.0001891451860296748, + "loss": 1.6238, + "step": 5034 + }, + { + "epoch": 3.250484183344093, + "grad_norm": 1.261365255493469, + "learning_rate": 0.00018908281479973192, + "loss": 1.429, + "step": 5035 + }, + { + "epoch": 3.251129761136217, + "grad_norm": 1.2114823446829885, + "learning_rate": 0.00018902043631952115, + "loss": 1.3594, + "step": 5036 + }, + { + "epoch": 3.251775338928341, + "grad_norm": 1.6371576778091128, + "learning_rate": 0.00018895805060061433, + "loss": 1.8854, + "step": 5037 + }, + { + "epoch": 3.2524209167204647, + "grad_norm": 1.3440821346291667, + "learning_rate": 0.00018889565765458467, + "loss": 1.6602, + "step": 5038 + }, + { + "epoch": 3.2530664945125887, + "grad_norm": 1.3550919117113511, + "learning_rate": 0.00018883325749300664, + "loss": 1.6109, + "step": 5039 + }, + { + "epoch": 3.2537120723047126, + "grad_norm": 1.2259946894022224, + "learning_rate": 0.00018877085012745614, + "loss": 1.5171, + "step": 5040 + }, + { + "epoch": 3.2543576500968365, + "grad_norm": 1.1636063553816838, + "learning_rate": 0.00018870843556951047, + "loss": 1.4862, + "step": 5041 + }, + { + "epoch": 3.2550032278889605, + "grad_norm": 1.3854295639463454, + "learning_rate": 0.00018864601383074807, + "loss": 1.6837, + "step": 5042 + }, + { + "epoch": 3.2556488056810844, + "grad_norm": 1.3388374961642935, + "learning_rate": 0.00018858358492274886, + "loss": 1.653, + "step": 5043 + }, + { + "epoch": 3.2562943834732083, + "grad_norm": 1.2879440651068605, + "learning_rate": 0.00018852114885709408, + "loss": 1.5279, + "step": 5044 + }, + { + "epoch": 3.2569399612653323, + "grad_norm": 1.451969169105987, + "learning_rate": 0.00018845870564536627, + "loss": 1.7514, + "step": 5045 + }, + { + "epoch": 3.257585539057456, + "grad_norm": 1.4843042179859127, + "learning_rate": 0.0001883962552991492, + "loss": 1.9019, + "step": 5046 + }, + { + "epoch": 3.2582311168495806, + "grad_norm": 1.3351225406593872, + "learning_rate": 0.00018833379783002815, + "loss": 1.4542, + "step": 5047 + }, + { + "epoch": 3.2588766946417045, + "grad_norm": 1.4031192466213638, + "learning_rate": 0.00018827133324958963, + "loss": 1.5913, + "step": 5048 + }, + { + "epoch": 3.2595222724338284, + "grad_norm": 1.3463611815409613, + "learning_rate": 
0.0001882088615694214, + "loss": 1.5569, + "step": 5049 + }, + { + "epoch": 3.2601678502259523, + "grad_norm": 1.645186821444749, + "learning_rate": 0.0001881463828011127, + "loss": 1.7156, + "step": 5050 + }, + { + "epoch": 3.2608134280180763, + "grad_norm": 1.4987275741157091, + "learning_rate": 0.00018808389695625392, + "loss": 1.6542, + "step": 5051 + }, + { + "epoch": 3.2614590058102, + "grad_norm": 1.4474700336303947, + "learning_rate": 0.00018802140404643685, + "loss": 1.7846, + "step": 5052 + }, + { + "epoch": 3.262104583602324, + "grad_norm": 1.8395897888805008, + "learning_rate": 0.00018795890408325455, + "loss": 1.7371, + "step": 5053 + }, + { + "epoch": 3.262750161394448, + "grad_norm": 1.7600231969457185, + "learning_rate": 0.00018789639707830142, + "loss": 1.7391, + "step": 5054 + }, + { + "epoch": 3.263395739186572, + "grad_norm": 1.3734641149888978, + "learning_rate": 0.00018783388304317324, + "loss": 1.5643, + "step": 5055 + }, + { + "epoch": 3.264041316978696, + "grad_norm": 1.4488168416100122, + "learning_rate": 0.00018777136198946693, + "loss": 1.9284, + "step": 5056 + }, + { + "epoch": 3.26468689477082, + "grad_norm": 1.3817757221585738, + "learning_rate": 0.00018770883392878077, + "loss": 1.7152, + "step": 5057 + }, + { + "epoch": 3.265332472562944, + "grad_norm": 1.5770813834299524, + "learning_rate": 0.00018764629887271444, + "loss": 1.8643, + "step": 5058 + }, + { + "epoch": 3.2659780503550677, + "grad_norm": 1.2923541416803577, + "learning_rate": 0.00018758375683286885, + "loss": 1.5226, + "step": 5059 + }, + { + "epoch": 3.2666236281471916, + "grad_norm": 1.373439589924816, + "learning_rate": 0.0001875212078208461, + "loss": 1.7898, + "step": 5060 + }, + { + "epoch": 3.2672692059393156, + "grad_norm": 1.2436350545150279, + "learning_rate": 0.00018745865184824971, + "loss": 1.5346, + "step": 5061 + }, + { + "epoch": 3.2679147837314395, + "grad_norm": 1.3439824248816419, + "learning_rate": 0.00018739608892668456, + "loss": 1.8124, + "step": 5062 + }, + { + "epoch": 3.2685603615235634, + "grad_norm": 1.3958859384689648, + "learning_rate": 0.00018733351906775664, + "loss": 1.6858, + "step": 5063 + }, + { + "epoch": 3.2692059393156874, + "grad_norm": 1.3564101074219597, + "learning_rate": 0.00018727094228307328, + "loss": 1.5891, + "step": 5064 + }, + { + "epoch": 3.2698515171078113, + "grad_norm": 1.4546353007941373, + "learning_rate": 0.0001872083585842432, + "loss": 1.678, + "step": 5065 + }, + { + "epoch": 3.2704970948999357, + "grad_norm": 1.5267172305159964, + "learning_rate": 0.0001871457679828763, + "loss": 1.6573, + "step": 5066 + }, + { + "epoch": 3.2711426726920596, + "grad_norm": 1.2858871122941657, + "learning_rate": 0.00018708317049058373, + "loss": 1.5597, + "step": 5067 + }, + { + "epoch": 3.2717882504841835, + "grad_norm": 1.3089682567891483, + "learning_rate": 0.00018702056611897804, + "loss": 1.5154, + "step": 5068 + }, + { + "epoch": 3.2724338282763075, + "grad_norm": 1.2595750641992496, + "learning_rate": 0.000186957954879673, + "loss": 1.4137, + "step": 5069 + }, + { + "epoch": 3.2730794060684314, + "grad_norm": 1.5656863716655045, + "learning_rate": 0.00018689533678428359, + "loss": 1.7123, + "step": 5070 + }, + { + "epoch": 3.2737249838605553, + "grad_norm": 1.257086872538844, + "learning_rate": 0.0001868327118444261, + "loss": 1.2643, + "step": 5071 + }, + { + "epoch": 3.2743705616526793, + "grad_norm": 1.870640619258664, + "learning_rate": 0.00018677008007171822, + "loss": 1.7873, + "step": 5072 + }, + { + "epoch": 3.275016139444803, + 
"grad_norm": 1.4987190835341218, + "learning_rate": 0.00018670744147777872, + "loss": 1.4634, + "step": 5073 + }, + { + "epoch": 3.275661717236927, + "grad_norm": 1.5660196193925704, + "learning_rate": 0.00018664479607422767, + "loss": 1.8096, + "step": 5074 + }, + { + "epoch": 3.276307295029051, + "grad_norm": 1.6139281503983933, + "learning_rate": 0.0001865821438726865, + "loss": 1.8024, + "step": 5075 + }, + { + "epoch": 3.276952872821175, + "grad_norm": 1.6978839278454314, + "learning_rate": 0.00018651948488477784, + "loss": 1.9935, + "step": 5076 + }, + { + "epoch": 3.277598450613299, + "grad_norm": 1.546752616110098, + "learning_rate": 0.0001864568191221256, + "loss": 1.8256, + "step": 5077 + }, + { + "epoch": 3.278244028405423, + "grad_norm": 1.418685669534943, + "learning_rate": 0.00018639414659635487, + "loss": 1.6962, + "step": 5078 + }, + { + "epoch": 3.2788896061975468, + "grad_norm": 1.2992043745378972, + "learning_rate": 0.00018633146731909204, + "loss": 1.5825, + "step": 5079 + }, + { + "epoch": 3.2795351839896707, + "grad_norm": 1.361560893928601, + "learning_rate": 0.0001862687813019649, + "loss": 1.7237, + "step": 5080 + }, + { + "epoch": 3.2801807617817946, + "grad_norm": 1.270579164416901, + "learning_rate": 0.00018620608855660225, + "loss": 1.3628, + "step": 5081 + }, + { + "epoch": 3.2808263395739186, + "grad_norm": 1.320079961688034, + "learning_rate": 0.00018614338909463422, + "loss": 1.6602, + "step": 5082 + }, + { + "epoch": 3.2814719173660425, + "grad_norm": 1.3912783317114823, + "learning_rate": 0.0001860806829276923, + "loss": 1.7529, + "step": 5083 + }, + { + "epoch": 3.2821174951581664, + "grad_norm": 1.413554660633052, + "learning_rate": 0.0001860179700674091, + "loss": 1.6037, + "step": 5084 + }, + { + "epoch": 3.2827630729502904, + "grad_norm": 1.2128916768868099, + "learning_rate": 0.00018595525052541846, + "loss": 1.5104, + "step": 5085 + }, + { + "epoch": 3.2834086507424143, + "grad_norm": 1.652163138708709, + "learning_rate": 0.00018589252431335556, + "loss": 1.6039, + "step": 5086 + }, + { + "epoch": 3.284054228534538, + "grad_norm": 1.384129587002356, + "learning_rate": 0.00018582979144285674, + "loss": 1.8353, + "step": 5087 + }, + { + "epoch": 3.284699806326662, + "grad_norm": 1.4625113999141621, + "learning_rate": 0.00018576705192555965, + "loss": 1.8763, + "step": 5088 + }, + { + "epoch": 3.285345384118786, + "grad_norm": 1.6255394875313036, + "learning_rate": 0.00018570430577310304, + "loss": 1.7387, + "step": 5089 + }, + { + "epoch": 3.2859909619109104, + "grad_norm": 1.383877529383636, + "learning_rate": 0.00018564155299712702, + "loss": 1.6892, + "step": 5090 + }, + { + "epoch": 3.2866365397030344, + "grad_norm": 1.3705301297290629, + "learning_rate": 0.00018557879360927288, + "loss": 1.6043, + "step": 5091 + }, + { + "epoch": 3.2872821174951583, + "grad_norm": 1.494735575605147, + "learning_rate": 0.0001855160276211831, + "loss": 1.863, + "step": 5092 + }, + { + "epoch": 3.2879276952872822, + "grad_norm": 1.2347044552366273, + "learning_rate": 0.00018545325504450144, + "loss": 1.3595, + "step": 5093 + }, + { + "epoch": 3.288573273079406, + "grad_norm": 1.3266028789846254, + "learning_rate": 0.00018539047589087294, + "loss": 1.6421, + "step": 5094 + }, + { + "epoch": 3.28921885087153, + "grad_norm": 1.5982233835631425, + "learning_rate": 0.00018532769017194373, + "loss": 1.6392, + "step": 5095 + }, + { + "epoch": 3.289864428663654, + "grad_norm": 1.346056501094947, + "learning_rate": 0.00018526489789936115, + "loss": 1.4533, + "step": 5096 + 
}, + { + "epoch": 3.290510006455778, + "grad_norm": 1.5299424691749959, + "learning_rate": 0.0001852020990847739, + "loss": 2.0542, + "step": 5097 + }, + { + "epoch": 3.291155584247902, + "grad_norm": 1.3294333642410785, + "learning_rate": 0.00018513929373983182, + "loss": 1.6231, + "step": 5098 + }, + { + "epoch": 3.291801162040026, + "grad_norm": 1.4544732464372288, + "learning_rate": 0.00018507648187618585, + "loss": 1.849, + "step": 5099 + }, + { + "epoch": 3.2924467398321497, + "grad_norm": 1.379367290603189, + "learning_rate": 0.00018501366350548836, + "loss": 1.7759, + "step": 5100 + }, + { + "epoch": 3.2930923176242737, + "grad_norm": 1.4536715161515057, + "learning_rate": 0.00018495083863939274, + "loss": 1.7801, + "step": 5101 + }, + { + "epoch": 3.2937378954163976, + "grad_norm": 1.537887514586069, + "learning_rate": 0.0001848880072895537, + "loss": 1.7743, + "step": 5102 + }, + { + "epoch": 3.2943834732085215, + "grad_norm": 1.4169344218182303, + "learning_rate": 0.00018482516946762709, + "loss": 1.6412, + "step": 5103 + }, + { + "epoch": 3.2950290510006455, + "grad_norm": 1.402167131618854, + "learning_rate": 0.00018476232518526992, + "loss": 1.9331, + "step": 5104 + }, + { + "epoch": 3.2956746287927694, + "grad_norm": 1.4001975656013612, + "learning_rate": 0.00018469947445414055, + "loss": 1.8401, + "step": 5105 + }, + { + "epoch": 3.2963202065848933, + "grad_norm": 1.3084361965752216, + "learning_rate": 0.0001846366172858984, + "loss": 1.7109, + "step": 5106 + }, + { + "epoch": 3.2969657843770173, + "grad_norm": 1.319063549013095, + "learning_rate": 0.0001845737536922041, + "loss": 1.9095, + "step": 5107 + }, + { + "epoch": 3.297611362169141, + "grad_norm": 1.2907874544899662, + "learning_rate": 0.0001845108836847196, + "loss": 1.7976, + "step": 5108 + }, + { + "epoch": 3.2982569399612656, + "grad_norm": 1.3039758936254446, + "learning_rate": 0.00018444800727510785, + "loss": 1.4656, + "step": 5109 + }, + { + "epoch": 3.2989025177533895, + "grad_norm": 1.3765614278905758, + "learning_rate": 0.00018438512447503305, + "loss": 1.6566, + "step": 5110 + }, + { + "epoch": 3.2995480955455134, + "grad_norm": 1.5249966949960196, + "learning_rate": 0.0001843222352961607, + "loss": 1.8705, + "step": 5111 + }, + { + "epoch": 3.3001936733376374, + "grad_norm": 1.4737394198175475, + "learning_rate": 0.00018425933975015735, + "loss": 1.7945, + "step": 5112 + }, + { + "epoch": 3.3008392511297613, + "grad_norm": 1.5243968682954285, + "learning_rate": 0.00018419643784869076, + "loss": 1.6501, + "step": 5113 + }, + { + "epoch": 3.301484828921885, + "grad_norm": 1.29779304866212, + "learning_rate": 0.00018413352960342997, + "loss": 1.5135, + "step": 5114 + }, + { + "epoch": 3.302130406714009, + "grad_norm": 1.2992742226004173, + "learning_rate": 0.00018407061502604498, + "loss": 1.4491, + "step": 5115 + }, + { + "epoch": 3.302775984506133, + "grad_norm": 1.2958092969932737, + "learning_rate": 0.0001840076941282072, + "loss": 1.3509, + "step": 5116 + }, + { + "epoch": 3.303421562298257, + "grad_norm": 1.387634436770328, + "learning_rate": 0.00018394476692158905, + "loss": 1.6753, + "step": 5117 + }, + { + "epoch": 3.304067140090381, + "grad_norm": 1.5336869381119353, + "learning_rate": 0.00018388183341786418, + "loss": 1.6573, + "step": 5118 + }, + { + "epoch": 3.304712717882505, + "grad_norm": 1.4775317253849745, + "learning_rate": 0.00018381889362870747, + "loss": 1.6768, + "step": 5119 + }, + { + "epoch": 3.305358295674629, + "grad_norm": 1.348679122751705, + "learning_rate": 
0.00018375594756579492, + "loss": 1.6332, + "step": 5120 + }, + { + "epoch": 3.3060038734667527, + "grad_norm": 1.308774583996847, + "learning_rate": 0.00018369299524080356, + "loss": 1.6111, + "step": 5121 + }, + { + "epoch": 3.3066494512588767, + "grad_norm": 1.3273623704289776, + "learning_rate": 0.00018363003666541178, + "loss": 1.571, + "step": 5122 + }, + { + "epoch": 3.3072950290510006, + "grad_norm": 1.453258477254789, + "learning_rate": 0.00018356707185129912, + "loss": 1.4951, + "step": 5123 + }, + { + "epoch": 3.3079406068431245, + "grad_norm": 1.5162511729790407, + "learning_rate": 0.000183504100810146, + "loss": 1.7612, + "step": 5124 + }, + { + "epoch": 3.3085861846352485, + "grad_norm": 1.2094136726183489, + "learning_rate": 0.0001834411235536344, + "loss": 1.4589, + "step": 5125 + }, + { + "epoch": 3.3092317624273724, + "grad_norm": 1.5740391657320503, + "learning_rate": 0.00018337814009344714, + "loss": 1.6677, + "step": 5126 + }, + { + "epoch": 3.3098773402194963, + "grad_norm": 1.2842484040228064, + "learning_rate": 0.00018331515044126839, + "loss": 1.7589, + "step": 5127 + }, + { + "epoch": 3.3105229180116202, + "grad_norm": 1.4641882130311537, + "learning_rate": 0.0001832521546087833, + "loss": 1.827, + "step": 5128 + }, + { + "epoch": 3.311168495803744, + "grad_norm": 1.3243033390716137, + "learning_rate": 0.00018318915260767828, + "loss": 1.7206, + "step": 5129 + }, + { + "epoch": 3.311814073595868, + "grad_norm": 1.3725846259409031, + "learning_rate": 0.00018312614444964095, + "loss": 1.7192, + "step": 5130 + }, + { + "epoch": 3.312459651387992, + "grad_norm": 1.4368685217578465, + "learning_rate": 0.00018306313014635985, + "loss": 1.8577, + "step": 5131 + }, + { + "epoch": 3.313105229180116, + "grad_norm": 1.2943583935057823, + "learning_rate": 0.00018300010970952478, + "loss": 1.5984, + "step": 5132 + }, + { + "epoch": 3.3137508069722403, + "grad_norm": 1.4282231678911468, + "learning_rate": 0.0001829370831508268, + "loss": 1.6654, + "step": 5133 + }, + { + "epoch": 3.3143963847643643, + "grad_norm": 1.613161162373886, + "learning_rate": 0.00018287405048195794, + "loss": 1.636, + "step": 5134 + }, + { + "epoch": 3.315041962556488, + "grad_norm": 1.2776147121612749, + "learning_rate": 0.00018281101171461133, + "loss": 1.523, + "step": 5135 + }, + { + "epoch": 3.315687540348612, + "grad_norm": 1.5707524512075293, + "learning_rate": 0.00018274796686048144, + "loss": 1.8422, + "step": 5136 + }, + { + "epoch": 3.316333118140736, + "grad_norm": 1.2877551228409443, + "learning_rate": 0.00018268491593126368, + "loss": 1.5293, + "step": 5137 + }, + { + "epoch": 3.31697869593286, + "grad_norm": 1.4016878510172794, + "learning_rate": 0.00018262185893865466, + "loss": 1.7261, + "step": 5138 + }, + { + "epoch": 3.317624273724984, + "grad_norm": 1.4064236296575183, + "learning_rate": 0.00018255879589435214, + "loss": 1.7097, + "step": 5139 + }, + { + "epoch": 3.318269851517108, + "grad_norm": 1.5064076855079542, + "learning_rate": 0.0001824957268100549, + "loss": 1.884, + "step": 5140 + }, + { + "epoch": 3.318915429309232, + "grad_norm": 1.4654689103467018, + "learning_rate": 0.000182432651697463, + "loss": 1.7447, + "step": 5141 + }, + { + "epoch": 3.3195610071013557, + "grad_norm": 1.278031243284626, + "learning_rate": 0.00018236957056827745, + "loss": 1.7375, + "step": 5142 + }, + { + "epoch": 3.3202065848934796, + "grad_norm": 1.1231055973540662, + "learning_rate": 0.00018230648343420047, + "loss": 1.3254, + "step": 5143 + }, + { + "epoch": 3.3208521626856036, + 
"grad_norm": 1.47390444622772, + "learning_rate": 0.0001822433903069354, + "loss": 1.8969, + "step": 5144 + }, + { + "epoch": 3.3214977404777275, + "grad_norm": 1.6071435886095298, + "learning_rate": 0.00018218029119818673, + "loss": 1.8952, + "step": 5145 + }, + { + "epoch": 3.3221433182698514, + "grad_norm": 1.3860528782666675, + "learning_rate": 0.00018211718611965986, + "loss": 1.7982, + "step": 5146 + }, + { + "epoch": 3.3227888960619754, + "grad_norm": 1.3315558515038308, + "learning_rate": 0.00018205407508306156, + "loss": 1.4951, + "step": 5147 + }, + { + "epoch": 3.3234344738540993, + "grad_norm": 1.5121592009962825, + "learning_rate": 0.0001819909581000995, + "loss": 2.064, + "step": 5148 + }, + { + "epoch": 3.324080051646223, + "grad_norm": 1.710238139010401, + "learning_rate": 0.00018192783518248257, + "loss": 1.8767, + "step": 5149 + }, + { + "epoch": 3.324725629438347, + "grad_norm": 1.459710060427723, + "learning_rate": 0.00018186470634192075, + "loss": 1.7111, + "step": 5150 + }, + { + "epoch": 3.325371207230471, + "grad_norm": 1.3765387389020811, + "learning_rate": 0.00018180157159012505, + "loss": 1.9506, + "step": 5151 + }, + { + "epoch": 3.3260167850225955, + "grad_norm": 1.3301637880097124, + "learning_rate": 0.00018173843093880764, + "loss": 1.7004, + "step": 5152 + }, + { + "epoch": 3.3266623628147194, + "grad_norm": 1.5835742819151786, + "learning_rate": 0.00018167528439968177, + "loss": 1.9026, + "step": 5153 + }, + { + "epoch": 3.3273079406068433, + "grad_norm": 1.2987125728795064, + "learning_rate": 0.00018161213198446174, + "loss": 1.6705, + "step": 5154 + }, + { + "epoch": 3.3279535183989672, + "grad_norm": 1.3746924955456334, + "learning_rate": 0.0001815489737048631, + "loss": 1.84, + "step": 5155 + }, + { + "epoch": 3.328599096191091, + "grad_norm": 1.3659716395428743, + "learning_rate": 0.00018148580957260224, + "loss": 1.6838, + "step": 5156 + }, + { + "epoch": 3.329244673983215, + "grad_norm": 1.2873712084278173, + "learning_rate": 0.0001814226395993968, + "loss": 1.4603, + "step": 5157 + }, + { + "epoch": 3.329890251775339, + "grad_norm": 1.460980869610815, + "learning_rate": 0.00018135946379696546, + "loss": 1.927, + "step": 5158 + }, + { + "epoch": 3.330535829567463, + "grad_norm": 1.4784864440802978, + "learning_rate": 0.00018129628217702806, + "loss": 1.7853, + "step": 5159 + }, + { + "epoch": 3.331181407359587, + "grad_norm": 1.3965960979081453, + "learning_rate": 0.0001812330947513053, + "loss": 1.6858, + "step": 5160 + }, + { + "epoch": 3.331826985151711, + "grad_norm": 1.506145888475864, + "learning_rate": 0.0001811699015315192, + "loss": 1.6773, + "step": 5161 + }, + { + "epoch": 3.3324725629438348, + "grad_norm": 1.2126230695463178, + "learning_rate": 0.00018110670252939278, + "loss": 1.2961, + "step": 5162 + }, + { + "epoch": 3.3331181407359587, + "grad_norm": 1.433100015274418, + "learning_rate": 0.0001810434977566501, + "loss": 1.717, + "step": 5163 + }, + { + "epoch": 3.3337637185280826, + "grad_norm": 1.3326627311112558, + "learning_rate": 0.00018098028722501625, + "loss": 1.5131, + "step": 5164 + }, + { + "epoch": 3.3344092963202066, + "grad_norm": 1.2292023888456458, + "learning_rate": 0.00018091707094621746, + "loss": 1.3829, + "step": 5165 + }, + { + "epoch": 3.3350548741123305, + "grad_norm": 1.513244132107499, + "learning_rate": 0.00018085384893198103, + "loss": 1.8965, + "step": 5166 + }, + { + "epoch": 3.3357004519044544, + "grad_norm": 1.4674081293695882, + "learning_rate": 0.00018079062119403527, + "loss": 1.7795, + "step": 5167 + 
}, + { + "epoch": 3.3363460296965783, + "grad_norm": 1.426786364927574, + "learning_rate": 0.0001807273877441096, + "loss": 1.7477, + "step": 5168 + }, + { + "epoch": 3.3369916074887023, + "grad_norm": 1.541590710048234, + "learning_rate": 0.00018066414859393444, + "loss": 1.8506, + "step": 5169 + }, + { + "epoch": 3.337637185280826, + "grad_norm": 1.5291554024746794, + "learning_rate": 0.00018060090375524146, + "loss": 1.6134, + "step": 5170 + }, + { + "epoch": 3.33828276307295, + "grad_norm": 1.353119609352784, + "learning_rate": 0.00018053765323976302, + "loss": 1.658, + "step": 5171 + }, + { + "epoch": 3.338928340865074, + "grad_norm": 1.3803227322759328, + "learning_rate": 0.00018047439705923287, + "loss": 1.8045, + "step": 5172 + }, + { + "epoch": 3.339573918657198, + "grad_norm": 1.331201767225124, + "learning_rate": 0.0001804111352253857, + "loss": 1.7743, + "step": 5173 + }, + { + "epoch": 3.340219496449322, + "grad_norm": 1.3920553882701332, + "learning_rate": 0.00018034786774995718, + "loss": 1.9257, + "step": 5174 + }, + { + "epoch": 3.340865074241446, + "grad_norm": 1.3909618688122076, + "learning_rate": 0.00018028459464468414, + "loss": 1.6218, + "step": 5175 + }, + { + "epoch": 3.3415106520335702, + "grad_norm": 1.3356634014055624, + "learning_rate": 0.0001802213159213043, + "loss": 1.7386, + "step": 5176 + }, + { + "epoch": 3.342156229825694, + "grad_norm": 1.2426576751563805, + "learning_rate": 0.00018015803159155666, + "loss": 1.5825, + "step": 5177 + }, + { + "epoch": 3.342801807617818, + "grad_norm": 1.8276069879503758, + "learning_rate": 0.00018009474166718099, + "loss": 1.6942, + "step": 5178 + }, + { + "epoch": 3.343447385409942, + "grad_norm": 1.4119370032303178, + "learning_rate": 0.0001800314461599183, + "loss": 1.7328, + "step": 5179 + }, + { + "epoch": 3.344092963202066, + "grad_norm": 1.6566824554242137, + "learning_rate": 0.00017996814508151058, + "loss": 1.8781, + "step": 5180 + }, + { + "epoch": 3.34473854099419, + "grad_norm": 1.214503744430565, + "learning_rate": 0.00017990483844370077, + "loss": 1.4499, + "step": 5181 + }, + { + "epoch": 3.345384118786314, + "grad_norm": 1.5444598753518748, + "learning_rate": 0.00017984152625823293, + "loss": 1.782, + "step": 5182 + }, + { + "epoch": 3.3460296965784377, + "grad_norm": 1.2781355470403777, + "learning_rate": 0.00017977820853685213, + "loss": 1.3269, + "step": 5183 + }, + { + "epoch": 3.3466752743705617, + "grad_norm": 1.4839687238190165, + "learning_rate": 0.00017971488529130457, + "loss": 1.898, + "step": 5184 + }, + { + "epoch": 3.3473208521626856, + "grad_norm": 1.4866223436388861, + "learning_rate": 0.0001796515565333372, + "loss": 1.6893, + "step": 5185 + }, + { + "epoch": 3.3479664299548095, + "grad_norm": 1.4407900120862251, + "learning_rate": 0.00017958822227469827, + "loss": 1.8502, + "step": 5186 + }, + { + "epoch": 3.3486120077469335, + "grad_norm": 1.5004406769496121, + "learning_rate": 0.00017952488252713687, + "loss": 1.6092, + "step": 5187 + }, + { + "epoch": 3.3492575855390574, + "grad_norm": 1.3429749560043662, + "learning_rate": 0.00017946153730240323, + "loss": 1.2571, + "step": 5188 + }, + { + "epoch": 3.3499031633311813, + "grad_norm": 1.3256673181213154, + "learning_rate": 0.0001793981866122486, + "loss": 1.4315, + "step": 5189 + }, + { + "epoch": 3.3505487411233053, + "grad_norm": 1.515066403837328, + "learning_rate": 0.00017933483046842508, + "loss": 1.6772, + "step": 5190 + }, + { + "epoch": 3.351194318915429, + "grad_norm": 1.315975066540157, + "learning_rate": 
0.0001792714688826859, + "loss": 1.5936, + "step": 5191 + }, + { + "epoch": 3.351839896707553, + "grad_norm": 1.3805675256441095, + "learning_rate": 0.0001792081018667854, + "loss": 1.6229, + "step": 5192 + }, + { + "epoch": 3.352485474499677, + "grad_norm": 1.4988566294056882, + "learning_rate": 0.0001791447294324787, + "loss": 1.6615, + "step": 5193 + }, + { + "epoch": 3.353131052291801, + "grad_norm": 1.394770470094806, + "learning_rate": 0.0001790813515915221, + "loss": 1.7212, + "step": 5194 + }, + { + "epoch": 3.3537766300839253, + "grad_norm": 1.3298969712390816, + "learning_rate": 0.00017901796835567295, + "loss": 1.761, + "step": 5195 + }, + { + "epoch": 3.3544222078760493, + "grad_norm": 1.5508445357404017, + "learning_rate": 0.00017895457973668929, + "loss": 1.6991, + "step": 5196 + }, + { + "epoch": 3.355067785668173, + "grad_norm": 1.213906423485287, + "learning_rate": 0.00017889118574633046, + "loss": 1.4702, + "step": 5197 + }, + { + "epoch": 3.355713363460297, + "grad_norm": 1.3145069915795782, + "learning_rate": 0.00017882778639635675, + "loss": 1.6975, + "step": 5198 + }, + { + "epoch": 3.356358941252421, + "grad_norm": 1.4428843019215012, + "learning_rate": 0.00017876438169852935, + "loss": 1.6711, + "step": 5199 + }, + { + "epoch": 3.357004519044545, + "grad_norm": 1.4436938068215301, + "learning_rate": 0.00017870097166461051, + "loss": 1.7147, + "step": 5200 + }, + { + "epoch": 3.357650096836669, + "grad_norm": 1.5617577407960714, + "learning_rate": 0.00017863755630636346, + "loss": 1.8311, + "step": 5201 + }, + { + "epoch": 3.358295674628793, + "grad_norm": 1.2752677345993737, + "learning_rate": 0.00017857413563555238, + "loss": 1.6061, + "step": 5202 + }, + { + "epoch": 3.358941252420917, + "grad_norm": 1.492871848713012, + "learning_rate": 0.00017851070966394246, + "loss": 1.8638, + "step": 5203 + }, + { + "epoch": 3.3595868302130407, + "grad_norm": 1.420459476804278, + "learning_rate": 0.00017844727840329985, + "loss": 1.7394, + "step": 5204 + }, + { + "epoch": 3.3602324080051647, + "grad_norm": 1.4515525800450113, + "learning_rate": 0.0001783838418653918, + "loss": 1.6245, + "step": 5205 + }, + { + "epoch": 3.3608779857972886, + "grad_norm": 1.3358215804160523, + "learning_rate": 0.00017832040006198637, + "loss": 1.6046, + "step": 5206 + }, + { + "epoch": 3.3615235635894125, + "grad_norm": 1.5393344140591374, + "learning_rate": 0.00017825695300485267, + "loss": 1.6986, + "step": 5207 + }, + { + "epoch": 3.3621691413815364, + "grad_norm": 1.4588285831581533, + "learning_rate": 0.00017819350070576086, + "loss": 1.7393, + "step": 5208 + }, + { + "epoch": 3.3628147191736604, + "grad_norm": 1.5811045063893139, + "learning_rate": 0.00017813004317648195, + "loss": 1.7114, + "step": 5209 + }, + { + "epoch": 3.3634602969657843, + "grad_norm": 1.3885573341671185, + "learning_rate": 0.0001780665804287879, + "loss": 1.6924, + "step": 5210 + }, + { + "epoch": 3.3641058747579082, + "grad_norm": 1.4852300231488416, + "learning_rate": 0.00017800311247445183, + "loss": 1.7912, + "step": 5211 + }, + { + "epoch": 3.364751452550032, + "grad_norm": 1.346624187016967, + "learning_rate": 0.00017793963932524767, + "loss": 1.7302, + "step": 5212 + }, + { + "epoch": 3.365397030342156, + "grad_norm": 1.4743502993840538, + "learning_rate": 0.00017787616099295032, + "loss": 1.7899, + "step": 5213 + }, + { + "epoch": 3.36604260813428, + "grad_norm": 1.3143927788746794, + "learning_rate": 0.0001778126774893357, + "loss": 1.4534, + "step": 5214 + }, + { + "epoch": 3.366688185926404, + 
"grad_norm": 1.3944555035526578, + "learning_rate": 0.0001777491888261806, + "loss": 1.7805, + "step": 5215 + }, + { + "epoch": 3.367333763718528, + "grad_norm": 1.5060214383361281, + "learning_rate": 0.00017768569501526292, + "loss": 1.8022, + "step": 5216 + }, + { + "epoch": 3.367979341510652, + "grad_norm": 1.314314388279859, + "learning_rate": 0.00017762219606836136, + "loss": 1.3936, + "step": 5217 + }, + { + "epoch": 3.3686249193027757, + "grad_norm": 1.4230568300458206, + "learning_rate": 0.0001775586919972556, + "loss": 1.7851, + "step": 5218 + }, + { + "epoch": 3.3692704970949, + "grad_norm": 1.3604095565772163, + "learning_rate": 0.00017749518281372638, + "loss": 1.7121, + "step": 5219 + }, + { + "epoch": 3.369916074887024, + "grad_norm": 1.4715297554222233, + "learning_rate": 0.0001774316685295553, + "loss": 2.0291, + "step": 5220 + }, + { + "epoch": 3.370561652679148, + "grad_norm": 1.3516677045147125, + "learning_rate": 0.00017736814915652485, + "loss": 1.7174, + "step": 5221 + }, + { + "epoch": 3.371207230471272, + "grad_norm": 1.4175898736790706, + "learning_rate": 0.00017730462470641863, + "loss": 1.672, + "step": 5222 + }, + { + "epoch": 3.371852808263396, + "grad_norm": 1.4078949657179813, + "learning_rate": 0.00017724109519102102, + "loss": 1.8447, + "step": 5223 + }, + { + "epoch": 3.3724983860555198, + "grad_norm": 1.3349481091759272, + "learning_rate": 0.00017717756062211743, + "loss": 1.8221, + "step": 5224 + }, + { + "epoch": 3.3731439638476437, + "grad_norm": 1.3685205639622138, + "learning_rate": 0.00017711402101149417, + "loss": 1.6613, + "step": 5225 + }, + { + "epoch": 3.3737895416397676, + "grad_norm": 1.1882487582109882, + "learning_rate": 0.0001770504763709385, + "loss": 1.3565, + "step": 5226 + }, + { + "epoch": 3.3744351194318916, + "grad_norm": 1.4696189217419229, + "learning_rate": 0.00017698692671223862, + "loss": 1.8474, + "step": 5227 + }, + { + "epoch": 3.3750806972240155, + "grad_norm": 1.5376448087523304, + "learning_rate": 0.00017692337204718364, + "loss": 1.9709, + "step": 5228 + }, + { + "epoch": 3.3757262750161394, + "grad_norm": 1.3491889869756495, + "learning_rate": 0.0001768598123875636, + "loss": 1.6334, + "step": 5229 + }, + { + "epoch": 3.3763718528082634, + "grad_norm": 1.5972294606275779, + "learning_rate": 0.00017679624774516955, + "loss": 1.7538, + "step": 5230 + }, + { + "epoch": 3.3770174306003873, + "grad_norm": 1.2944976408390252, + "learning_rate": 0.00017673267813179326, + "loss": 1.4679, + "step": 5231 + }, + { + "epoch": 3.377663008392511, + "grad_norm": 1.4725654505760206, + "learning_rate": 0.00017666910355922767, + "loss": 1.7683, + "step": 5232 + }, + { + "epoch": 3.378308586184635, + "grad_norm": 1.50913442682841, + "learning_rate": 0.00017660552403926647, + "loss": 1.7592, + "step": 5233 + }, + { + "epoch": 3.378954163976759, + "grad_norm": 1.559395075638342, + "learning_rate": 0.00017654193958370433, + "loss": 1.8094, + "step": 5234 + }, + { + "epoch": 3.379599741768883, + "grad_norm": 1.5435969057207488, + "learning_rate": 0.00017647835020433682, + "loss": 1.9039, + "step": 5235 + }, + { + "epoch": 3.380245319561007, + "grad_norm": 1.3762812721770372, + "learning_rate": 0.00017641475591296044, + "loss": 1.5203, + "step": 5236 + }, + { + "epoch": 3.380890897353131, + "grad_norm": 1.3826525958847165, + "learning_rate": 0.0001763511567213726, + "loss": 1.6673, + "step": 5237 + }, + { + "epoch": 3.3815364751452552, + "grad_norm": 1.302548688756577, + "learning_rate": 0.0001762875526413716, + "loss": 1.459, + "step": 5238 + 
}, + { + "epoch": 3.382182052937379, + "grad_norm": 1.5126895735463115, + "learning_rate": 0.00017622394368475665, + "loss": 1.7407, + "step": 5239 + }, + { + "epoch": 3.382827630729503, + "grad_norm": 1.354973656659431, + "learning_rate": 0.0001761603298633279, + "loss": 1.6383, + "step": 5240 + }, + { + "epoch": 3.383473208521627, + "grad_norm": 1.3737162624973491, + "learning_rate": 0.0001760967111888863, + "loss": 1.7425, + "step": 5241 + }, + { + "epoch": 3.384118786313751, + "grad_norm": 1.3567162884960207, + "learning_rate": 0.00017603308767323388, + "loss": 1.7075, + "step": 5242 + }, + { + "epoch": 3.384764364105875, + "grad_norm": 1.4589862757158556, + "learning_rate": 0.00017596945932817334, + "loss": 1.533, + "step": 5243 + }, + { + "epoch": 3.385409941897999, + "grad_norm": 1.3932444278041145, + "learning_rate": 0.0001759058261655085, + "loss": 1.6462, + "step": 5244 + }, + { + "epoch": 3.3860555196901228, + "grad_norm": 1.3506195024765104, + "learning_rate": 0.000175842188197044, + "loss": 1.6422, + "step": 5245 + }, + { + "epoch": 3.3867010974822467, + "grad_norm": 1.4264245296359286, + "learning_rate": 0.00017577854543458522, + "loss": 1.6249, + "step": 5246 + }, + { + "epoch": 3.3873466752743706, + "grad_norm": 1.3824850738563204, + "learning_rate": 0.00017571489788993863, + "loss": 1.5904, + "step": 5247 + }, + { + "epoch": 3.3879922530664945, + "grad_norm": 1.3626321230551892, + "learning_rate": 0.0001756512455749115, + "loss": 1.6254, + "step": 5248 + }, + { + "epoch": 3.3886378308586185, + "grad_norm": 1.3436000240026877, + "learning_rate": 0.00017558758850131202, + "loss": 1.5265, + "step": 5249 + }, + { + "epoch": 3.3892834086507424, + "grad_norm": 1.5500394898107428, + "learning_rate": 0.0001755239266809492, + "loss": 1.7695, + "step": 5250 + }, + { + "epoch": 3.3899289864428663, + "grad_norm": 1.4395549862368942, + "learning_rate": 0.00017546026012563298, + "loss": 1.668, + "step": 5251 + }, + { + "epoch": 3.3905745642349903, + "grad_norm": 1.4004061421142189, + "learning_rate": 0.00017539658884717415, + "loss": 1.6501, + "step": 5252 + }, + { + "epoch": 3.391220142027114, + "grad_norm": 1.4578736036190068, + "learning_rate": 0.00017533291285738443, + "loss": 1.5739, + "step": 5253 + }, + { + "epoch": 3.391865719819238, + "grad_norm": 1.5787106506003843, + "learning_rate": 0.00017526923216807633, + "loss": 1.7003, + "step": 5254 + }, + { + "epoch": 3.392511297611362, + "grad_norm": 1.3860900562959206, + "learning_rate": 0.0001752055467910633, + "loss": 1.6248, + "step": 5255 + }, + { + "epoch": 3.393156875403486, + "grad_norm": 1.342880840040686, + "learning_rate": 0.00017514185673815971, + "loss": 1.5413, + "step": 5256 + }, + { + "epoch": 3.39380245319561, + "grad_norm": 1.3238195186841502, + "learning_rate": 0.0001750781620211806, + "loss": 1.5155, + "step": 5257 + }, + { + "epoch": 3.394448030987734, + "grad_norm": 1.3197089917405518, + "learning_rate": 0.00017501446265194207, + "loss": 1.5438, + "step": 5258 + }, + { + "epoch": 3.3950936087798578, + "grad_norm": 1.3957201043096208, + "learning_rate": 0.00017495075864226107, + "loss": 1.5376, + "step": 5259 + }, + { + "epoch": 3.3957391865719817, + "grad_norm": 1.6207666347899734, + "learning_rate": 0.00017488705000395522, + "loss": 1.7144, + "step": 5260 + }, + { + "epoch": 3.3963847643641056, + "grad_norm": 1.3291786816224356, + "learning_rate": 0.00017482333674884322, + "loss": 1.5113, + "step": 5261 + }, + { + "epoch": 3.39703034215623, + "grad_norm": 1.4078449872385073, + "learning_rate": 
0.00017475961888874456, + "loss": 1.5918, + "step": 5262 + }, + { + "epoch": 3.397675919948354, + "grad_norm": 1.5954319258841192, + "learning_rate": 0.0001746958964354795, + "loss": 1.6881, + "step": 5263 + }, + { + "epoch": 3.398321497740478, + "grad_norm": 1.3077811454552652, + "learning_rate": 0.00017463216940086926, + "loss": 1.5542, + "step": 5264 + }, + { + "epoch": 3.398967075532602, + "grad_norm": 1.3372942779419854, + "learning_rate": 0.00017456843779673585, + "loss": 1.5339, + "step": 5265 + }, + { + "epoch": 3.3996126533247257, + "grad_norm": 1.4918211155326593, + "learning_rate": 0.00017450470163490216, + "loss": 1.8071, + "step": 5266 + }, + { + "epoch": 3.4002582311168497, + "grad_norm": 1.5607150293275687, + "learning_rate": 0.00017444096092719187, + "loss": 1.8264, + "step": 5267 + }, + { + "epoch": 3.4009038089089736, + "grad_norm": 1.4521515332318025, + "learning_rate": 0.00017437721568542956, + "loss": 1.9833, + "step": 5268 + }, + { + "epoch": 3.4015493867010975, + "grad_norm": 1.399378154504957, + "learning_rate": 0.0001743134659214407, + "loss": 1.8702, + "step": 5269 + }, + { + "epoch": 3.4021949644932215, + "grad_norm": 1.174274593040175, + "learning_rate": 0.0001742497116470515, + "loss": 1.3681, + "step": 5270 + }, + { + "epoch": 3.4028405422853454, + "grad_norm": 1.5105424265736158, + "learning_rate": 0.00017418595287408892, + "loss": 1.7503, + "step": 5271 + }, + { + "epoch": 3.4034861200774693, + "grad_norm": 1.3932325962193501, + "learning_rate": 0.00017412218961438102, + "loss": 1.7222, + "step": 5272 + }, + { + "epoch": 3.4041316978695932, + "grad_norm": 1.4540327550351082, + "learning_rate": 0.00017405842187975652, + "loss": 1.4927, + "step": 5273 + }, + { + "epoch": 3.404777275661717, + "grad_norm": 1.2583964134152914, + "learning_rate": 0.00017399464968204497, + "loss": 1.477, + "step": 5274 + }, + { + "epoch": 3.405422853453841, + "grad_norm": 1.3816487826917194, + "learning_rate": 0.00017393087303307678, + "loss": 1.7076, + "step": 5275 + }, + { + "epoch": 3.406068431245965, + "grad_norm": 1.3087437023801993, + "learning_rate": 0.00017386709194468317, + "loss": 1.4391, + "step": 5276 + }, + { + "epoch": 3.406714009038089, + "grad_norm": 1.419046048439028, + "learning_rate": 0.0001738033064286962, + "loss": 1.6224, + "step": 5277 + }, + { + "epoch": 3.407359586830213, + "grad_norm": 1.4187670786413074, + "learning_rate": 0.00017373951649694876, + "loss": 1.5453, + "step": 5278 + }, + { + "epoch": 3.408005164622337, + "grad_norm": 1.514316980627338, + "learning_rate": 0.00017367572216127453, + "loss": 1.7087, + "step": 5279 + }, + { + "epoch": 3.4086507424144608, + "grad_norm": 1.4585682978837116, + "learning_rate": 0.00017361192343350802, + "loss": 1.8805, + "step": 5280 + }, + { + "epoch": 3.409296320206585, + "grad_norm": 1.3288964563163308, + "learning_rate": 0.00017354812032548461, + "loss": 1.4476, + "step": 5281 + }, + { + "epoch": 3.409941897998709, + "grad_norm": 1.584909087935521, + "learning_rate": 0.00017348431284904033, + "loss": 1.9057, + "step": 5282 + }, + { + "epoch": 3.410587475790833, + "grad_norm": 1.450168122410224, + "learning_rate": 0.00017342050101601224, + "loss": 1.4977, + "step": 5283 + }, + { + "epoch": 3.411233053582957, + "grad_norm": 1.4059547126449394, + "learning_rate": 0.00017335668483823806, + "loss": 1.6385, + "step": 5284 + }, + { + "epoch": 3.411878631375081, + "grad_norm": 1.3708691375545008, + "learning_rate": 0.0001732928643275563, + "loss": 1.512, + "step": 5285 + }, + { + "epoch": 3.412524209167205, + 
"grad_norm": 1.3015470949588868, + "learning_rate": 0.00017322903949580637, + "loss": 1.3778, + "step": 5286 + }, + { + "epoch": 3.4131697869593287, + "grad_norm": 1.4545514012475886, + "learning_rate": 0.0001731652103548285, + "loss": 1.7073, + "step": 5287 + }, + { + "epoch": 3.4138153647514526, + "grad_norm": 1.521836451633547, + "learning_rate": 0.00017310137691646353, + "loss": 1.7725, + "step": 5288 + }, + { + "epoch": 3.4144609425435766, + "grad_norm": 1.4248344249029428, + "learning_rate": 0.00017303753919255333, + "loss": 1.6821, + "step": 5289 + }, + { + "epoch": 3.4151065203357005, + "grad_norm": 1.3791629945803072, + "learning_rate": 0.0001729736971949404, + "loss": 1.6972, + "step": 5290 + }, + { + "epoch": 3.4157520981278244, + "grad_norm": 1.5157911194321259, + "learning_rate": 0.00017290985093546815, + "loss": 1.5894, + "step": 5291 + }, + { + "epoch": 3.4163976759199484, + "grad_norm": 1.4038964345332876, + "learning_rate": 0.00017284600042598067, + "loss": 1.6677, + "step": 5292 + }, + { + "epoch": 3.4170432537120723, + "grad_norm": 1.419680942008455, + "learning_rate": 0.00017278214567832288, + "loss": 1.8236, + "step": 5293 + }, + { + "epoch": 3.4176888315041962, + "grad_norm": 1.7681117645268636, + "learning_rate": 0.00017271828670434056, + "loss": 1.9498, + "step": 5294 + }, + { + "epoch": 3.41833440929632, + "grad_norm": 1.4129634159849893, + "learning_rate": 0.00017265442351588025, + "loss": 1.6611, + "step": 5295 + }, + { + "epoch": 3.418979987088444, + "grad_norm": 1.562786737488883, + "learning_rate": 0.00017259055612478907, + "loss": 1.904, + "step": 5296 + }, + { + "epoch": 3.419625564880568, + "grad_norm": 1.383945234664945, + "learning_rate": 0.00017252668454291524, + "loss": 1.6904, + "step": 5297 + }, + { + "epoch": 3.420271142672692, + "grad_norm": 1.6209671301650896, + "learning_rate": 0.00017246280878210755, + "loss": 1.6894, + "step": 5298 + }, + { + "epoch": 3.420916720464816, + "grad_norm": 1.5158683542358617, + "learning_rate": 0.0001723989288542156, + "loss": 1.6747, + "step": 5299 + }, + { + "epoch": 3.42156229825694, + "grad_norm": 1.3837777708062011, + "learning_rate": 0.0001723350447710898, + "loss": 1.7102, + "step": 5300 + }, + { + "epoch": 3.4222078760490637, + "grad_norm": 1.4975773226875422, + "learning_rate": 0.00017227115654458127, + "loss": 1.8731, + "step": 5301 + }, + { + "epoch": 3.4228534538411877, + "grad_norm": 1.4693914864863529, + "learning_rate": 0.000172207264186542, + "loss": 1.7466, + "step": 5302 + }, + { + "epoch": 3.4234990316333116, + "grad_norm": 1.3642367589855886, + "learning_rate": 0.00017214336770882466, + "loss": 1.6312, + "step": 5303 + }, + { + "epoch": 3.4241446094254355, + "grad_norm": 1.3092854545102262, + "learning_rate": 0.00017207946712328266, + "loss": 1.5687, + "step": 5304 + }, + { + "epoch": 3.42479018721756, + "grad_norm": 1.6051762210116953, + "learning_rate": 0.00017201556244177032, + "loss": 1.8472, + "step": 5305 + }, + { + "epoch": 3.425435765009684, + "grad_norm": 1.4501686121463067, + "learning_rate": 0.0001719516536761426, + "loss": 1.7112, + "step": 5306 + }, + { + "epoch": 3.4260813428018078, + "grad_norm": 1.388694959542572, + "learning_rate": 0.00017188774083825512, + "loss": 1.6139, + "step": 5307 + }, + { + "epoch": 3.4267269205939317, + "grad_norm": 1.9685195357214769, + "learning_rate": 0.00017182382393996453, + "loss": 1.6584, + "step": 5308 + }, + { + "epoch": 3.4273724983860556, + "grad_norm": 1.39720158955731, + "learning_rate": 0.000171759902993128, + "loss": 1.5646, + "step": 5309 
+ }, + { + "epoch": 3.4280180761781796, + "grad_norm": 1.6314970442728132, + "learning_rate": 0.00017169597800960356, + "loss": 2.0253, + "step": 5310 + }, + { + "epoch": 3.4286636539703035, + "grad_norm": 1.3260110707122839, + "learning_rate": 0.00017163204900124993, + "loss": 1.6129, + "step": 5311 + }, + { + "epoch": 3.4293092317624274, + "grad_norm": 1.4336615965609993, + "learning_rate": 0.00017156811597992663, + "loss": 1.6246, + "step": 5312 + }, + { + "epoch": 3.4299548095545513, + "grad_norm": 1.5266505847543683, + "learning_rate": 0.00017150417895749387, + "loss": 1.778, + "step": 5313 + }, + { + "epoch": 3.4306003873466753, + "grad_norm": 1.4226783445119198, + "learning_rate": 0.00017144023794581268, + "loss": 1.6511, + "step": 5314 + }, + { + "epoch": 3.431245965138799, + "grad_norm": 1.3304460563706595, + "learning_rate": 0.00017137629295674469, + "loss": 1.4462, + "step": 5315 + }, + { + "epoch": 3.431891542930923, + "grad_norm": 1.3170285494893268, + "learning_rate": 0.0001713123440021525, + "loss": 1.7406, + "step": 5316 + }, + { + "epoch": 3.432537120723047, + "grad_norm": 1.5020821274151033, + "learning_rate": 0.00017124839109389917, + "loss": 1.672, + "step": 5317 + }, + { + "epoch": 3.433182698515171, + "grad_norm": 1.4694623501932047, + "learning_rate": 0.00017118443424384862, + "loss": 1.7435, + "step": 5318 + }, + { + "epoch": 3.433828276307295, + "grad_norm": 1.5101835563294603, + "learning_rate": 0.00017112047346386566, + "loss": 1.7224, + "step": 5319 + }, + { + "epoch": 3.434473854099419, + "grad_norm": 1.4581943434954963, + "learning_rate": 0.0001710565087658156, + "loss": 1.9884, + "step": 5320 + }, + { + "epoch": 3.435119431891543, + "grad_norm": 1.3663939971563719, + "learning_rate": 0.0001709925401615645, + "loss": 1.6254, + "step": 5321 + }, + { + "epoch": 3.4357650096836667, + "grad_norm": 1.3844933260857388, + "learning_rate": 0.00017092856766297925, + "loss": 1.7892, + "step": 5322 + }, + { + "epoch": 3.4364105874757906, + "grad_norm": 1.3144754655985715, + "learning_rate": 0.00017086459128192743, + "loss": 1.743, + "step": 5323 + }, + { + "epoch": 3.437056165267915, + "grad_norm": 1.3588834646941812, + "learning_rate": 0.0001708006110302773, + "loss": 1.6822, + "step": 5324 + }, + { + "epoch": 3.437701743060039, + "grad_norm": 1.5327765460588245, + "learning_rate": 0.00017073662691989786, + "loss": 1.8311, + "step": 5325 + }, + { + "epoch": 3.438347320852163, + "grad_norm": 1.3966318835977696, + "learning_rate": 0.00017067263896265885, + "loss": 1.7806, + "step": 5326 + }, + { + "epoch": 3.438992898644287, + "grad_norm": 1.4173130045753801, + "learning_rate": 0.00017060864717043063, + "loss": 1.6665, + "step": 5327 + }, + { + "epoch": 3.4396384764364107, + "grad_norm": 1.3659410542826862, + "learning_rate": 0.00017054465155508442, + "loss": 1.5649, + "step": 5328 + }, + { + "epoch": 3.4402840542285347, + "grad_norm": 1.2255242397536517, + "learning_rate": 0.000170480652128492, + "loss": 1.392, + "step": 5329 + }, + { + "epoch": 3.4409296320206586, + "grad_norm": 1.3122215476219787, + "learning_rate": 0.000170416648902526, + "loss": 1.7714, + "step": 5330 + }, + { + "epoch": 3.4415752098127825, + "grad_norm": 1.1739313138241556, + "learning_rate": 0.00017035264188905973, + "loss": 1.3231, + "step": 5331 + }, + { + "epoch": 3.4422207876049065, + "grad_norm": 1.6028687388243752, + "learning_rate": 0.00017028863109996698, + "loss": 1.8212, + "step": 5332 + }, + { + "epoch": 3.4428663653970304, + "grad_norm": 1.486339912133202, + "learning_rate": 
0.00017022461654712256, + "loss": 1.7926, + "step": 5333 + }, + { + "epoch": 3.4435119431891543, + "grad_norm": 1.7287400075182868, + "learning_rate": 0.00017016059824240178, + "loss": 1.8711, + "step": 5334 + }, + { + "epoch": 3.4441575209812783, + "grad_norm": 1.4158861101017846, + "learning_rate": 0.00017009657619768072, + "loss": 1.5754, + "step": 5335 + }, + { + "epoch": 3.444803098773402, + "grad_norm": 1.1873774779182913, + "learning_rate": 0.0001700325504248361, + "loss": 1.5314, + "step": 5336 + }, + { + "epoch": 3.445448676565526, + "grad_norm": 1.547616253997427, + "learning_rate": 0.0001699685209357454, + "loss": 1.9288, + "step": 5337 + }, + { + "epoch": 3.44609425435765, + "grad_norm": 1.490038485798821, + "learning_rate": 0.0001699044877422868, + "loss": 1.8601, + "step": 5338 + }, + { + "epoch": 3.446739832149774, + "grad_norm": 1.4077459772937775, + "learning_rate": 0.00016984045085633898, + "loss": 1.808, + "step": 5339 + }, + { + "epoch": 3.447385409941898, + "grad_norm": 1.1171518044847875, + "learning_rate": 0.0001697764102897816, + "loss": 1.3664, + "step": 5340 + }, + { + "epoch": 3.448030987734022, + "grad_norm": 1.4036706674896917, + "learning_rate": 0.00016971236605449474, + "loss": 1.6247, + "step": 5341 + }, + { + "epoch": 3.4486765655261458, + "grad_norm": 1.4864094011947653, + "learning_rate": 0.00016964831816235933, + "loss": 1.7621, + "step": 5342 + }, + { + "epoch": 3.4493221433182697, + "grad_norm": 1.3713086567413932, + "learning_rate": 0.0001695842666252569, + "loss": 1.6334, + "step": 5343 + }, + { + "epoch": 3.4499677211103936, + "grad_norm": 1.4582925477403303, + "learning_rate": 0.0001695202114550697, + "loss": 1.8679, + "step": 5344 + }, + { + "epoch": 3.4506132989025176, + "grad_norm": 1.5054687565466551, + "learning_rate": 0.00016945615266368056, + "loss": 1.7421, + "step": 5345 + }, + { + "epoch": 3.4512588766946415, + "grad_norm": 1.4475845812456196, + "learning_rate": 0.00016939209026297313, + "loss": 1.7387, + "step": 5346 + }, + { + "epoch": 3.4519044544867654, + "grad_norm": 1.5425680474087078, + "learning_rate": 0.00016932802426483163, + "loss": 1.6663, + "step": 5347 + }, + { + "epoch": 3.45255003227889, + "grad_norm": 1.2323112830844731, + "learning_rate": 0.00016926395468114095, + "loss": 1.4093, + "step": 5348 + }, + { + "epoch": 3.4531956100710137, + "grad_norm": 1.2603919063167468, + "learning_rate": 0.00016919988152378667, + "loss": 1.2835, + "step": 5349 + }, + { + "epoch": 3.4538411878631377, + "grad_norm": 1.4921484096308397, + "learning_rate": 0.00016913580480465502, + "loss": 1.666, + "step": 5350 + }, + { + "epoch": 3.4544867656552616, + "grad_norm": 1.612058767775562, + "learning_rate": 0.0001690717245356329, + "loss": 1.9043, + "step": 5351 + }, + { + "epoch": 3.4551323434473855, + "grad_norm": 1.3794109814345537, + "learning_rate": 0.00016900764072860788, + "loss": 1.5942, + "step": 5352 + }, + { + "epoch": 3.4557779212395094, + "grad_norm": 1.3578198542269744, + "learning_rate": 0.00016894355339546816, + "loss": 1.5909, + "step": 5353 + }, + { + "epoch": 3.4564234990316334, + "grad_norm": 1.4407728290439537, + "learning_rate": 0.00016887946254810256, + "loss": 1.7877, + "step": 5354 + }, + { + "epoch": 3.4570690768237573, + "grad_norm": 1.4660341810032083, + "learning_rate": 0.00016881536819840067, + "loss": 1.7566, + "step": 5355 + }, + { + "epoch": 3.4577146546158812, + "grad_norm": 1.3744077629701466, + "learning_rate": 0.00016875127035825268, + "loss": 1.6288, + "step": 5356 + }, + { + "epoch": 3.458360232408005, + 
"grad_norm": 1.3071071940008034, + "learning_rate": 0.0001686871690395493, + "loss": 1.682, + "step": 5357 + }, + { + "epoch": 3.459005810200129, + "grad_norm": 1.447230732140927, + "learning_rate": 0.00016862306425418207, + "loss": 1.913, + "step": 5358 + }, + { + "epoch": 3.459651387992253, + "grad_norm": 1.5165899902311428, + "learning_rate": 0.00016855895601404302, + "loss": 1.7019, + "step": 5359 + }, + { + "epoch": 3.460296965784377, + "grad_norm": 1.4403362613328867, + "learning_rate": 0.000168494844331025, + "loss": 1.8211, + "step": 5360 + }, + { + "epoch": 3.460942543576501, + "grad_norm": 1.4406560506939168, + "learning_rate": 0.00016843072921702133, + "loss": 1.7341, + "step": 5361 + }, + { + "epoch": 3.461588121368625, + "grad_norm": 1.2523865254739668, + "learning_rate": 0.000168366610683926, + "loss": 1.7327, + "step": 5362 + }, + { + "epoch": 3.4622336991607487, + "grad_norm": 1.361832329399447, + "learning_rate": 0.0001683024887436337, + "loss": 1.7352, + "step": 5363 + }, + { + "epoch": 3.4628792769528727, + "grad_norm": 1.420819443973236, + "learning_rate": 0.00016823836340803974, + "loss": 1.7582, + "step": 5364 + }, + { + "epoch": 3.4635248547449966, + "grad_norm": 1.4359326090500812, + "learning_rate": 0.00016817423468904, + "loss": 1.7533, + "step": 5365 + }, + { + "epoch": 3.4641704325371205, + "grad_norm": 1.413585591750598, + "learning_rate": 0.00016811010259853106, + "loss": 1.7796, + "step": 5366 + }, + { + "epoch": 3.464816010329245, + "grad_norm": 1.5807754608603242, + "learning_rate": 0.00016804596714841002, + "loss": 1.8392, + "step": 5367 + }, + { + "epoch": 3.465461588121369, + "grad_norm": 1.5020375112422932, + "learning_rate": 0.0001679818283505747, + "loss": 1.8223, + "step": 5368 + }, + { + "epoch": 3.4661071659134928, + "grad_norm": 1.2695145666763792, + "learning_rate": 0.00016791768621692357, + "loss": 1.4543, + "step": 5369 + }, + { + "epoch": 3.4667527437056167, + "grad_norm": 1.492750415837517, + "learning_rate": 0.00016785354075935562, + "loss": 1.6235, + "step": 5370 + }, + { + "epoch": 3.4673983214977406, + "grad_norm": 1.3123418934143152, + "learning_rate": 0.00016778939198977045, + "loss": 1.6538, + "step": 5371 + }, + { + "epoch": 3.4680438992898646, + "grad_norm": 1.3419982940796962, + "learning_rate": 0.0001677252399200684, + "loss": 1.7086, + "step": 5372 + }, + { + "epoch": 3.4686894770819885, + "grad_norm": 1.4488938890671297, + "learning_rate": 0.00016766108456215032, + "loss": 1.8955, + "step": 5373 + }, + { + "epoch": 3.4693350548741124, + "grad_norm": 1.3637006508261869, + "learning_rate": 0.00016759692592791767, + "loss": 1.6491, + "step": 5374 + }, + { + "epoch": 3.4699806326662364, + "grad_norm": 1.455340067454875, + "learning_rate": 0.00016753276402927257, + "loss": 1.7674, + "step": 5375 + }, + { + "epoch": 3.4706262104583603, + "grad_norm": 1.6340263820341063, + "learning_rate": 0.00016746859887811771, + "loss": 1.976, + "step": 5376 + }, + { + "epoch": 3.471271788250484, + "grad_norm": 1.278943133561275, + "learning_rate": 0.0001674044304863564, + "loss": 1.4202, + "step": 5377 + }, + { + "epoch": 3.471917366042608, + "grad_norm": 1.461248586571601, + "learning_rate": 0.00016734025886589254, + "loss": 1.6437, + "step": 5378 + }, + { + "epoch": 3.472562943834732, + "grad_norm": 1.5342470801822068, + "learning_rate": 0.00016727608402863057, + "loss": 1.9201, + "step": 5379 + }, + { + "epoch": 3.473208521626856, + "grad_norm": 1.4654915133949387, + "learning_rate": 0.00016721190598647568, + "loss": 1.7803, + "step": 5380 + }, + 
{ + "epoch": 3.47385409941898, + "grad_norm": 1.3598222323640072, + "learning_rate": 0.00016714772475133358, + "loss": 1.7393, + "step": 5381 + }, + { + "epoch": 3.474499677211104, + "grad_norm": 1.473812105000984, + "learning_rate": 0.00016708354033511042, + "loss": 1.7357, + "step": 5382 + }, + { + "epoch": 3.475145255003228, + "grad_norm": 1.4609391159944984, + "learning_rate": 0.0001670193527497132, + "loss": 1.5984, + "step": 5383 + }, + { + "epoch": 3.4757908327953517, + "grad_norm": 1.5121197808497624, + "learning_rate": 0.00016695516200704937, + "loss": 1.707, + "step": 5384 + }, + { + "epoch": 3.4764364105874757, + "grad_norm": 1.6614566743112524, + "learning_rate": 0.0001668909681190269, + "loss": 1.9068, + "step": 5385 + }, + { + "epoch": 3.4770819883795996, + "grad_norm": 1.391984396399145, + "learning_rate": 0.0001668267710975545, + "loss": 1.6796, + "step": 5386 + }, + { + "epoch": 3.4777275661717235, + "grad_norm": 1.3584190387394353, + "learning_rate": 0.00016676257095454136, + "loss": 1.6628, + "step": 5387 + }, + { + "epoch": 3.4783731439638474, + "grad_norm": 1.3611756223256049, + "learning_rate": 0.00016669836770189726, + "loss": 1.5029, + "step": 5388 + }, + { + "epoch": 3.4790187217559714, + "grad_norm": 1.43544309519504, + "learning_rate": 0.00016663416135153262, + "loss": 1.7537, + "step": 5389 + }, + { + "epoch": 3.4796642995480953, + "grad_norm": 1.5603292138678653, + "learning_rate": 0.0001665699519153583, + "loss": 1.8103, + "step": 5390 + }, + { + "epoch": 3.4803098773402197, + "grad_norm": 1.502249921415115, + "learning_rate": 0.00016650573940528587, + "loss": 1.7112, + "step": 5391 + }, + { + "epoch": 3.4809554551323436, + "grad_norm": 1.6113119662833624, + "learning_rate": 0.00016644152383322745, + "loss": 1.8877, + "step": 5392 + }, + { + "epoch": 3.4816010329244675, + "grad_norm": 1.6256525918624865, + "learning_rate": 0.00016637730521109557, + "loss": 1.9406, + "step": 5393 + }, + { + "epoch": 3.4822466107165915, + "grad_norm": 1.526236091052034, + "learning_rate": 0.00016631308355080358, + "loss": 1.6862, + "step": 5394 + }, + { + "epoch": 3.4828921885087154, + "grad_norm": 1.326261589899667, + "learning_rate": 0.0001662488588642653, + "loss": 1.6606, + "step": 5395 + }, + { + "epoch": 3.4835377663008393, + "grad_norm": 1.4479154855150174, + "learning_rate": 0.00016618463116339487, + "loss": 1.6933, + "step": 5396 + }, + { + "epoch": 3.4841833440929633, + "grad_norm": 1.2455366000255002, + "learning_rate": 0.00016612040046010742, + "loss": 1.5905, + "step": 5397 + }, + { + "epoch": 3.484828921885087, + "grad_norm": 1.5197644291890073, + "learning_rate": 0.00016605616676631827, + "loss": 1.825, + "step": 5398 + }, + { + "epoch": 3.485474499677211, + "grad_norm": 1.4470526549078384, + "learning_rate": 0.0001659919300939435, + "loss": 1.7359, + "step": 5399 + }, + { + "epoch": 3.486120077469335, + "grad_norm": 1.3610845298947332, + "learning_rate": 0.00016592769045489967, + "loss": 1.6572, + "step": 5400 + }, + { + "epoch": 3.486765655261459, + "grad_norm": 1.3667654216070477, + "learning_rate": 0.00016586344786110387, + "loss": 1.6739, + "step": 5401 + }, + { + "epoch": 3.487411233053583, + "grad_norm": 1.4762380096718926, + "learning_rate": 0.0001657992023244738, + "loss": 1.6776, + "step": 5402 + }, + { + "epoch": 3.488056810845707, + "grad_norm": 1.4431239731441952, + "learning_rate": 0.00016573495385692764, + "loss": 1.6617, + "step": 5403 + }, + { + "epoch": 3.4887023886378308, + "grad_norm": 1.582004562441242, + "learning_rate": 
0.0001656707024703842, + "loss": 1.7983, + "step": 5404 + }, + { + "epoch": 3.4893479664299547, + "grad_norm": 1.3393086134418193, + "learning_rate": 0.00016560644817676274, + "loss": 1.6174, + "step": 5405 + }, + { + "epoch": 3.4899935442220786, + "grad_norm": 1.617548619222722, + "learning_rate": 0.00016554219098798315, + "loss": 1.7693, + "step": 5406 + }, + { + "epoch": 3.4906391220142026, + "grad_norm": 1.3024999074636208, + "learning_rate": 0.0001654779309159657, + "loss": 1.4625, + "step": 5407 + }, + { + "epoch": 3.4912846998063265, + "grad_norm": 1.34989429780923, + "learning_rate": 0.00016541366797263141, + "loss": 1.6007, + "step": 5408 + }, + { + "epoch": 3.4919302775984504, + "grad_norm": 1.4230266389888613, + "learning_rate": 0.00016534940216990168, + "loss": 1.6354, + "step": 5409 + }, + { + "epoch": 3.492575855390575, + "grad_norm": 1.2847577374917267, + "learning_rate": 0.00016528513351969846, + "loss": 1.4763, + "step": 5410 + }, + { + "epoch": 3.4932214331826987, + "grad_norm": 1.4348463329088204, + "learning_rate": 0.0001652208620339443, + "loss": 1.6591, + "step": 5411 + }, + { + "epoch": 3.4938670109748227, + "grad_norm": 1.4217454777461618, + "learning_rate": 0.0001651565877245622, + "loss": 1.6719, + "step": 5412 + }, + { + "epoch": 3.4945125887669466, + "grad_norm": 1.4741758002544958, + "learning_rate": 0.0001650923106034757, + "loss": 1.6299, + "step": 5413 + }, + { + "epoch": 3.4951581665590705, + "grad_norm": 1.432821188072596, + "learning_rate": 0.0001650280306826089, + "loss": 1.697, + "step": 5414 + }, + { + "epoch": 3.4958037443511945, + "grad_norm": 1.283680481555892, + "learning_rate": 0.00016496374797388637, + "loss": 1.4303, + "step": 5415 + }, + { + "epoch": 3.4964493221433184, + "grad_norm": 1.3784654239895613, + "learning_rate": 0.00016489946248923325, + "loss": 1.5252, + "step": 5416 + }, + { + "epoch": 3.4970948999354423, + "grad_norm": 1.4025424685086778, + "learning_rate": 0.00016483517424057515, + "loss": 1.6319, + "step": 5417 + }, + { + "epoch": 3.4977404777275662, + "grad_norm": 1.4815812090786842, + "learning_rate": 0.00016477088323983816, + "loss": 1.9437, + "step": 5418 + }, + { + "epoch": 3.49838605551969, + "grad_norm": 1.5058467574640944, + "learning_rate": 0.00016470658949894898, + "loss": 1.8001, + "step": 5419 + }, + { + "epoch": 3.499031633311814, + "grad_norm": 1.4274635938966158, + "learning_rate": 0.00016464229302983485, + "loss": 1.4599, + "step": 5420 + }, + { + "epoch": 3.499677211103938, + "grad_norm": 1.3976051785910928, + "learning_rate": 0.00016457799384442321, + "loss": 1.6991, + "step": 5421 + }, + { + "epoch": 3.500322788896062, + "grad_norm": 1.4254515028799257, + "learning_rate": 0.00016451369195464243, + "loss": 1.7907, + "step": 5422 + }, + { + "epoch": 3.500968366688186, + "grad_norm": 1.3249860519618832, + "learning_rate": 0.00016444938737242108, + "loss": 1.5841, + "step": 5423 + }, + { + "epoch": 3.50161394448031, + "grad_norm": 1.4821863657990755, + "learning_rate": 0.00016438508010968838, + "loss": 1.6533, + "step": 5424 + }, + { + "epoch": 3.5022595222724338, + "grad_norm": 1.3975194427047153, + "learning_rate": 0.00016432077017837396, + "loss": 1.8017, + "step": 5425 + }, + { + "epoch": 3.5029051000645577, + "grad_norm": 1.3798233450566673, + "learning_rate": 0.00016425645759040794, + "loss": 1.6376, + "step": 5426 + }, + { + "epoch": 3.5035506778566816, + "grad_norm": 1.2999654670272873, + "learning_rate": 0.0001641921423577211, + "loss": 1.5628, + "step": 5427 + }, + { + "epoch": 3.5041962556488055, + 
"grad_norm": 1.5824711387447141, + "learning_rate": 0.00016412782449224447, + "loss": 1.9211, + "step": 5428 + }, + { + "epoch": 3.5048418334409295, + "grad_norm": 1.460953848799459, + "learning_rate": 0.00016406350400590967, + "loss": 1.7963, + "step": 5429 + }, + { + "epoch": 3.5054874112330534, + "grad_norm": 1.2680909556304145, + "learning_rate": 0.00016399918091064886, + "loss": 1.4878, + "step": 5430 + }, + { + "epoch": 3.5061329890251773, + "grad_norm": 1.5306446322223963, + "learning_rate": 0.00016393485521839474, + "loss": 1.7681, + "step": 5431 + }, + { + "epoch": 3.5067785668173013, + "grad_norm": 1.449123077709795, + "learning_rate": 0.00016387052694108022, + "loss": 1.5317, + "step": 5432 + }, + { + "epoch": 3.507424144609425, + "grad_norm": 1.3827879380280839, + "learning_rate": 0.00016380619609063896, + "loss": 1.5952, + "step": 5433 + }, + { + "epoch": 3.508069722401549, + "grad_norm": 1.313739227331031, + "learning_rate": 0.00016374186267900502, + "loss": 1.3713, + "step": 5434 + }, + { + "epoch": 3.5087153001936735, + "grad_norm": 1.242715699997275, + "learning_rate": 0.00016367752671811282, + "loss": 1.6242, + "step": 5435 + }, + { + "epoch": 3.5093608779857974, + "grad_norm": 1.283039259724419, + "learning_rate": 0.00016361318821989744, + "loss": 1.6409, + "step": 5436 + }, + { + "epoch": 3.5100064557779214, + "grad_norm": 1.2485242259352325, + "learning_rate": 0.00016354884719629433, + "loss": 1.5519, + "step": 5437 + }, + { + "epoch": 3.5106520335700453, + "grad_norm": 1.2757355946785367, + "learning_rate": 0.00016348450365923935, + "loss": 1.5158, + "step": 5438 + }, + { + "epoch": 3.5112976113621692, + "grad_norm": 1.3337532809891939, + "learning_rate": 0.00016342015762066896, + "loss": 1.6616, + "step": 5439 + }, + { + "epoch": 3.511943189154293, + "grad_norm": 1.3360384011154627, + "learning_rate": 0.00016335580909252, + "loss": 1.6816, + "step": 5440 + }, + { + "epoch": 3.512588766946417, + "grad_norm": 1.4736898064148942, + "learning_rate": 0.00016329145808672974, + "loss": 1.746, + "step": 5441 + }, + { + "epoch": 3.513234344738541, + "grad_norm": 1.4881721498332787, + "learning_rate": 0.00016322710461523613, + "loss": 1.6206, + "step": 5442 + }, + { + "epoch": 3.513879922530665, + "grad_norm": 1.440450218671764, + "learning_rate": 0.00016316274868997716, + "loss": 1.7361, + "step": 5443 + }, + { + "epoch": 3.514525500322789, + "grad_norm": 1.4101971181862996, + "learning_rate": 0.00016309839032289166, + "loss": 1.6542, + "step": 5444 + }, + { + "epoch": 3.515171078114913, + "grad_norm": 1.3362520092941876, + "learning_rate": 0.00016303402952591884, + "loss": 1.5753, + "step": 5445 + }, + { + "epoch": 3.5158166559070367, + "grad_norm": 1.5049349660183402, + "learning_rate": 0.00016296966631099816, + "loss": 1.7558, + "step": 5446 + }, + { + "epoch": 3.5164622336991607, + "grad_norm": 1.5168189224569897, + "learning_rate": 0.00016290530069006974, + "loss": 1.6944, + "step": 5447 + }, + { + "epoch": 3.5171078114912846, + "grad_norm": 1.4284286259691843, + "learning_rate": 0.00016284093267507403, + "loss": 1.6856, + "step": 5448 + }, + { + "epoch": 3.5177533892834085, + "grad_norm": 1.496863746271414, + "learning_rate": 0.000162776562277952, + "loss": 1.6541, + "step": 5449 + }, + { + "epoch": 3.5183989670755325, + "grad_norm": 1.5145862855774899, + "learning_rate": 0.00016271218951064505, + "loss": 1.7988, + "step": 5450 + }, + { + "epoch": 3.5190445448676564, + "grad_norm": 1.3668758675363832, + "learning_rate": 0.00016264781438509493, + "loss": 1.6796, + 
"step": 5451 + }, + { + "epoch": 3.5196901226597808, + "grad_norm": 1.2887327413588825, + "learning_rate": 0.00016258343691324395, + "loss": 1.6576, + "step": 5452 + }, + { + "epoch": 3.5203357004519047, + "grad_norm": 1.4369364400427984, + "learning_rate": 0.00016251905710703478, + "loss": 1.6809, + "step": 5453 + }, + { + "epoch": 3.5209812782440286, + "grad_norm": 1.4122999970839332, + "learning_rate": 0.00016245467497841046, + "loss": 1.6886, + "step": 5454 + }, + { + "epoch": 3.5216268560361526, + "grad_norm": 1.4589227127047524, + "learning_rate": 0.0001623902905393147, + "loss": 1.788, + "step": 5455 + }, + { + "epoch": 3.5222724338282765, + "grad_norm": 1.246163264611578, + "learning_rate": 0.00016232590380169142, + "loss": 1.5448, + "step": 5456 + }, + { + "epoch": 3.5229180116204004, + "grad_norm": 1.3202259139682253, + "learning_rate": 0.0001622615147774849, + "loss": 1.5148, + "step": 5457 + }, + { + "epoch": 3.5235635894125243, + "grad_norm": 1.5662131727286903, + "learning_rate": 0.0001621971234786402, + "loss": 1.8684, + "step": 5458 + }, + { + "epoch": 3.5242091672046483, + "grad_norm": 1.5137702604596919, + "learning_rate": 0.00016213272991710243, + "loss": 1.7507, + "step": 5459 + }, + { + "epoch": 3.524854744996772, + "grad_norm": 1.45262425978305, + "learning_rate": 0.00016206833410481727, + "loss": 1.7204, + "step": 5460 + }, + { + "epoch": 3.525500322788896, + "grad_norm": 1.4844259739113097, + "learning_rate": 0.00016200393605373084, + "loss": 1.8472, + "step": 5461 + }, + { + "epoch": 3.52614590058102, + "grad_norm": 1.468121344404663, + "learning_rate": 0.00016193953577578967, + "loss": 1.7481, + "step": 5462 + }, + { + "epoch": 3.526791478373144, + "grad_norm": 1.5142378890863515, + "learning_rate": 0.00016187513328294058, + "loss": 1.6643, + "step": 5463 + }, + { + "epoch": 3.527437056165268, + "grad_norm": 1.476341363603049, + "learning_rate": 0.00016181072858713105, + "loss": 1.5934, + "step": 5464 + }, + { + "epoch": 3.528082633957392, + "grad_norm": 1.4711836778690535, + "learning_rate": 0.00016174632170030872, + "loss": 1.7403, + "step": 5465 + }, + { + "epoch": 3.528728211749516, + "grad_norm": 1.4994394705263356, + "learning_rate": 0.00016168191263442173, + "loss": 1.5508, + "step": 5466 + }, + { + "epoch": 3.5293737895416397, + "grad_norm": 1.3005494244403337, + "learning_rate": 0.00016161750140141872, + "loss": 1.6062, + "step": 5467 + }, + { + "epoch": 3.5300193673337636, + "grad_norm": 1.351336693162018, + "learning_rate": 0.00016155308801324853, + "loss": 1.6757, + "step": 5468 + }, + { + "epoch": 3.5306649451258876, + "grad_norm": 1.3844579179756944, + "learning_rate": 0.00016148867248186056, + "loss": 1.519, + "step": 5469 + }, + { + "epoch": 3.5313105229180115, + "grad_norm": 1.6121347921880422, + "learning_rate": 0.0001614242548192046, + "loss": 1.714, + "step": 5470 + }, + { + "epoch": 3.5319561007101354, + "grad_norm": 1.397920338500363, + "learning_rate": 0.00016135983503723077, + "loss": 1.6686, + "step": 5471 + }, + { + "epoch": 3.5326016785022594, + "grad_norm": 1.5257110180528655, + "learning_rate": 0.00016129541314788954, + "loss": 1.6607, + "step": 5472 + }, + { + "epoch": 3.5332472562943833, + "grad_norm": 1.2811876207878656, + "learning_rate": 0.00016123098916313196, + "loss": 1.6144, + "step": 5473 + }, + { + "epoch": 3.5338928340865072, + "grad_norm": 1.3973583838171129, + "learning_rate": 0.00016116656309490924, + "loss": 1.691, + "step": 5474 + }, + { + "epoch": 3.534538411878631, + "grad_norm": 1.3446313735089768, + 
"learning_rate": 0.00016110213495517314, + "loss": 1.5338, + "step": 5475 + }, + { + "epoch": 3.535183989670755, + "grad_norm": 1.4539240925109507, + "learning_rate": 0.00016103770475587575, + "loss": 1.6641, + "step": 5476 + }, + { + "epoch": 3.535829567462879, + "grad_norm": 1.4480726205058205, + "learning_rate": 0.0001609732725089695, + "loss": 1.6294, + "step": 5477 + }, + { + "epoch": 3.5364751452550034, + "grad_norm": 1.4953259326291188, + "learning_rate": 0.0001609088382264073, + "loss": 1.8102, + "step": 5478 + }, + { + "epoch": 3.5371207230471273, + "grad_norm": 1.450169047225617, + "learning_rate": 0.00016084440192014234, + "loss": 1.6999, + "step": 5479 + }, + { + "epoch": 3.5377663008392513, + "grad_norm": 1.3251270120930778, + "learning_rate": 0.00016077996360212824, + "loss": 1.5049, + "step": 5480 + }, + { + "epoch": 3.538411878631375, + "grad_norm": 1.4484619845711615, + "learning_rate": 0.000160715523284319, + "loss": 1.7549, + "step": 5481 + }, + { + "epoch": 3.539057456423499, + "grad_norm": 1.3129282007374117, + "learning_rate": 0.00016065108097866888, + "loss": 1.6128, + "step": 5482 + }, + { + "epoch": 3.539703034215623, + "grad_norm": 1.3311583994315208, + "learning_rate": 0.00016058663669713268, + "loss": 1.5866, + "step": 5483 + }, + { + "epoch": 3.540348612007747, + "grad_norm": 1.4742044052589058, + "learning_rate": 0.00016052219045166544, + "loss": 1.9922, + "step": 5484 + }, + { + "epoch": 3.540994189799871, + "grad_norm": 1.4131599078813437, + "learning_rate": 0.00016045774225422266, + "loss": 1.6091, + "step": 5485 + }, + { + "epoch": 3.541639767591995, + "grad_norm": 1.3832510852953985, + "learning_rate": 0.0001603932921167601, + "loss": 1.7539, + "step": 5486 + }, + { + "epoch": 3.5422853453841188, + "grad_norm": 1.408526023124659, + "learning_rate": 0.00016032884005123395, + "loss": 1.8133, + "step": 5487 + }, + { + "epoch": 3.5429309231762427, + "grad_norm": 1.4714386175745213, + "learning_rate": 0.0001602643860696007, + "loss": 1.6614, + "step": 5488 + }, + { + "epoch": 3.5435765009683666, + "grad_norm": 1.3269059237257628, + "learning_rate": 0.0001601999301838173, + "loss": 1.6149, + "step": 5489 + }, + { + "epoch": 3.5442220787604906, + "grad_norm": 1.6193238908116425, + "learning_rate": 0.00016013547240584095, + "loss": 1.816, + "step": 5490 + }, + { + "epoch": 3.5448676565526145, + "grad_norm": 1.43040753146005, + "learning_rate": 0.00016007101274762917, + "loss": 1.6328, + "step": 5491 + }, + { + "epoch": 3.5455132343447384, + "grad_norm": 1.4667038123653784, + "learning_rate": 0.00016000655122114005, + "loss": 1.7317, + "step": 5492 + }, + { + "epoch": 3.5461588121368623, + "grad_norm": 1.2917097261680401, + "learning_rate": 0.00015994208783833174, + "loss": 1.3908, + "step": 5493 + }, + { + "epoch": 3.5468043899289863, + "grad_norm": 2.7039934797653795, + "learning_rate": 0.00015987762261116293, + "loss": 1.9264, + "step": 5494 + }, + { + "epoch": 3.5474499677211107, + "grad_norm": 1.415885370993299, + "learning_rate": 0.00015981315555159257, + "loss": 1.7143, + "step": 5495 + }, + { + "epoch": 3.5480955455132346, + "grad_norm": 1.4183167314730956, + "learning_rate": 0.00015974868667158, + "loss": 1.5783, + "step": 5496 + }, + { + "epoch": 3.5487411233053585, + "grad_norm": 1.4426755051815163, + "learning_rate": 0.0001596842159830848, + "loss": 1.4934, + "step": 5497 + }, + { + "epoch": 3.5493867010974824, + "grad_norm": 1.5656513060784318, + "learning_rate": 0.000159619743498067, + "loss": 1.8837, + "step": 5498 + }, + { + "epoch": 
3.5500322788896064, + "grad_norm": 1.5093118253169193, + "learning_rate": 0.00015955526922848692, + "loss": 1.758, + "step": 5499 + }, + { + "epoch": 3.5506778566817303, + "grad_norm": 1.3819827718487787, + "learning_rate": 0.00015949079318630514, + "loss": 1.6231, + "step": 5500 + }, + { + "epoch": 3.5513234344738542, + "grad_norm": 1.4279753894736842, + "learning_rate": 0.00015942631538348276, + "loss": 1.8716, + "step": 5501 + }, + { + "epoch": 3.551969012265978, + "grad_norm": 1.5760394328315634, + "learning_rate": 0.00015936183583198095, + "loss": 1.8747, + "step": 5502 + }, + { + "epoch": 3.552614590058102, + "grad_norm": 1.4922009947750103, + "learning_rate": 0.00015929735454376143, + "loss": 1.8003, + "step": 5503 + }, + { + "epoch": 3.553260167850226, + "grad_norm": 1.4266305556388943, + "learning_rate": 0.00015923287153078605, + "loss": 1.6701, + "step": 5504 + }, + { + "epoch": 3.55390574564235, + "grad_norm": 1.3745387140000624, + "learning_rate": 0.00015916838680501717, + "loss": 1.5528, + "step": 5505 + }, + { + "epoch": 3.554551323434474, + "grad_norm": 1.301160422704805, + "learning_rate": 0.00015910390037841742, + "loss": 1.3174, + "step": 5506 + }, + { + "epoch": 3.555196901226598, + "grad_norm": 1.4519036406654402, + "learning_rate": 0.00015903941226294956, + "loss": 1.8073, + "step": 5507 + }, + { + "epoch": 3.5558424790187217, + "grad_norm": 1.2481113943274356, + "learning_rate": 0.00015897492247057688, + "loss": 1.2717, + "step": 5508 + }, + { + "epoch": 3.5564880568108457, + "grad_norm": 1.4308439073692711, + "learning_rate": 0.00015891043101326293, + "loss": 1.5705, + "step": 5509 + }, + { + "epoch": 3.5571336346029696, + "grad_norm": 1.2229999208993634, + "learning_rate": 0.00015884593790297148, + "loss": 1.3036, + "step": 5510 + }, + { + "epoch": 3.5577792123950935, + "grad_norm": 1.3833783378512325, + "learning_rate": 0.0001587814431516667, + "loss": 1.6525, + "step": 5511 + }, + { + "epoch": 3.5584247901872175, + "grad_norm": 1.617512459233006, + "learning_rate": 0.00015871694677131308, + "loss": 1.7011, + "step": 5512 + }, + { + "epoch": 3.5590703679793414, + "grad_norm": 1.3229037354508424, + "learning_rate": 0.0001586524487738753, + "loss": 1.4868, + "step": 5513 + }, + { + "epoch": 3.5597159457714653, + "grad_norm": 1.4219973476851664, + "learning_rate": 0.00015858794917131847, + "loss": 1.6044, + "step": 5514 + }, + { + "epoch": 3.5603615235635893, + "grad_norm": 1.319718213434074, + "learning_rate": 0.0001585234479756079, + "loss": 1.5062, + "step": 5515 + }, + { + "epoch": 3.561007101355713, + "grad_norm": 1.2822127254901337, + "learning_rate": 0.00015845894519870917, + "loss": 1.4378, + "step": 5516 + }, + { + "epoch": 3.561652679147837, + "grad_norm": 1.609258034607658, + "learning_rate": 0.0001583944408525884, + "loss": 1.5941, + "step": 5517 + }, + { + "epoch": 3.562298256939961, + "grad_norm": 1.4964185739908595, + "learning_rate": 0.00015832993494921158, + "loss": 1.6921, + "step": 5518 + }, + { + "epoch": 3.562943834732085, + "grad_norm": 1.7769542273693557, + "learning_rate": 0.0001582654275005454, + "loss": 1.7019, + "step": 5519 + }, + { + "epoch": 3.563589412524209, + "grad_norm": 1.4263519455013915, + "learning_rate": 0.00015820091851855662, + "loss": 1.6372, + "step": 5520 + }, + { + "epoch": 3.564234990316333, + "grad_norm": 1.429593451647234, + "learning_rate": 0.00015813640801521227, + "loss": 1.5739, + "step": 5521 + }, + { + "epoch": 3.564880568108457, + "grad_norm": 1.3571377100647504, + "learning_rate": 0.00015807189600247978, + 
"loss": 1.5069, + "step": 5522 + }, + { + "epoch": 3.565526145900581, + "grad_norm": 1.450168540648339, + "learning_rate": 0.00015800738249232679, + "loss": 1.7991, + "step": 5523 + }, + { + "epoch": 3.566171723692705, + "grad_norm": 1.5297936547238695, + "learning_rate": 0.00015794286749672119, + "loss": 1.8225, + "step": 5524 + }, + { + "epoch": 3.566817301484829, + "grad_norm": 1.3657218416561971, + "learning_rate": 0.0001578783510276312, + "loss": 1.5064, + "step": 5525 + }, + { + "epoch": 3.567462879276953, + "grad_norm": 1.4385893122977222, + "learning_rate": 0.00015781383309702531, + "loss": 1.7004, + "step": 5526 + }, + { + "epoch": 3.568108457069077, + "grad_norm": 1.4349945633492411, + "learning_rate": 0.00015774931371687225, + "loss": 1.8428, + "step": 5527 + }, + { + "epoch": 3.568754034861201, + "grad_norm": 1.367598616001326, + "learning_rate": 0.00015768479289914104, + "loss": 1.82, + "step": 5528 + }, + { + "epoch": 3.5693996126533247, + "grad_norm": 1.2373967320746935, + "learning_rate": 0.0001576202706558009, + "loss": 1.3123, + "step": 5529 + }, + { + "epoch": 3.5700451904454487, + "grad_norm": 1.5673796669688762, + "learning_rate": 0.00015755574699882153, + "loss": 1.6317, + "step": 5530 + }, + { + "epoch": 3.5706907682375726, + "grad_norm": 1.5263924599190315, + "learning_rate": 0.00015749122194017265, + "loss": 1.7737, + "step": 5531 + }, + { + "epoch": 3.5713363460296965, + "grad_norm": 1.4638319518321734, + "learning_rate": 0.00015742669549182424, + "loss": 1.7232, + "step": 5532 + }, + { + "epoch": 3.5719819238218204, + "grad_norm": 1.6574889372758321, + "learning_rate": 0.00015736216766574674, + "loss": 1.7459, + "step": 5533 + }, + { + "epoch": 3.5726275016139444, + "grad_norm": 1.519324772677719, + "learning_rate": 0.00015729763847391074, + "loss": 1.6256, + "step": 5534 + }, + { + "epoch": 3.5732730794060683, + "grad_norm": 1.6166523265729351, + "learning_rate": 0.00015723310792828697, + "loss": 1.9009, + "step": 5535 + }, + { + "epoch": 3.5739186571981922, + "grad_norm": 1.464034480049274, + "learning_rate": 0.00015716857604084665, + "loss": 1.9012, + "step": 5536 + }, + { + "epoch": 3.574564234990316, + "grad_norm": 1.371443815591266, + "learning_rate": 0.00015710404282356103, + "loss": 1.6184, + "step": 5537 + }, + { + "epoch": 3.5752098127824405, + "grad_norm": 1.5142606019446863, + "learning_rate": 0.00015703950828840167, + "loss": 1.7904, + "step": 5538 + }, + { + "epoch": 3.5758553905745645, + "grad_norm": 1.6807293392307758, + "learning_rate": 0.00015697497244734047, + "loss": 1.7969, + "step": 5539 + }, + { + "epoch": 3.5765009683666884, + "grad_norm": 1.5159218695055854, + "learning_rate": 0.00015691043531234947, + "loss": 1.6121, + "step": 5540 + }, + { + "epoch": 3.5771465461588123, + "grad_norm": 1.468911717665801, + "learning_rate": 0.0001568458968954009, + "loss": 1.6497, + "step": 5541 + }, + { + "epoch": 3.5777921239509363, + "grad_norm": 1.528821085520616, + "learning_rate": 0.0001567813572084675, + "loss": 1.6437, + "step": 5542 + }, + { + "epoch": 3.57843770174306, + "grad_norm": 1.6229152054688012, + "learning_rate": 0.00015671681626352183, + "loss": 1.8152, + "step": 5543 + }, + { + "epoch": 3.579083279535184, + "grad_norm": 1.5079247577095984, + "learning_rate": 0.00015665227407253704, + "loss": 1.8213, + "step": 5544 + }, + { + "epoch": 3.579728857327308, + "grad_norm": 1.3717960596992593, + "learning_rate": 0.00015658773064748636, + "loss": 1.6949, + "step": 5545 + }, + { + "epoch": 3.580374435119432, + "grad_norm": 1.4036756548738616, 
+ "learning_rate": 0.0001565231860003432, + "loss": 1.7387, + "step": 5546 + }, + { + "epoch": 3.581020012911556, + "grad_norm": 1.4487949665211166, + "learning_rate": 0.00015645864014308136, + "loss": 1.706, + "step": 5547 + }, + { + "epoch": 3.58166559070368, + "grad_norm": 1.5672962629746379, + "learning_rate": 0.00015639409308767472, + "loss": 1.8355, + "step": 5548 + }, + { + "epoch": 3.5823111684958038, + "grad_norm": 1.657677643229288, + "learning_rate": 0.00015632954484609738, + "loss": 1.7362, + "step": 5549 + }, + { + "epoch": 3.5829567462879277, + "grad_norm": 1.3972359557213188, + "learning_rate": 0.0001562649954303238, + "loss": 1.5287, + "step": 5550 + }, + { + "epoch": 3.5836023240800516, + "grad_norm": 1.3571755737270599, + "learning_rate": 0.0001562004448523285, + "loss": 1.6847, + "step": 5551 + }, + { + "epoch": 3.5842479018721756, + "grad_norm": 1.3310105958382077, + "learning_rate": 0.0001561358931240863, + "loss": 1.6159, + "step": 5552 + }, + { + "epoch": 3.5848934796642995, + "grad_norm": 1.3164624507832456, + "learning_rate": 0.00015607134025757223, + "loss": 1.5251, + "step": 5553 + }, + { + "epoch": 3.5855390574564234, + "grad_norm": 1.5269489052131426, + "learning_rate": 0.0001560067862647615, + "loss": 1.7901, + "step": 5554 + }, + { + "epoch": 3.5861846352485474, + "grad_norm": 1.5328329631986795, + "learning_rate": 0.00015594223115762956, + "loss": 1.7013, + "step": 5555 + }, + { + "epoch": 3.5868302130406713, + "grad_norm": 1.4389273511639173, + "learning_rate": 0.0001558776749481521, + "loss": 1.6168, + "step": 5556 + }, + { + "epoch": 3.587475790832795, + "grad_norm": 1.3821472215796715, + "learning_rate": 0.0001558131176483048, + "loss": 1.7468, + "step": 5557 + }, + { + "epoch": 3.588121368624919, + "grad_norm": 1.342313177484613, + "learning_rate": 0.00015574855927006387, + "loss": 1.5997, + "step": 5558 + }, + { + "epoch": 3.588766946417043, + "grad_norm": 1.4527107019981542, + "learning_rate": 0.00015568399982540554, + "loss": 1.7322, + "step": 5559 + }, + { + "epoch": 3.589412524209167, + "grad_norm": 1.4777599403072548, + "learning_rate": 0.0001556194393263062, + "loss": 1.6432, + "step": 5560 + }, + { + "epoch": 3.590058102001291, + "grad_norm": 1.3187417588789174, + "learning_rate": 0.00015555487778474254, + "loss": 1.6361, + "step": 5561 + }, + { + "epoch": 3.590703679793415, + "grad_norm": 2.151318021704009, + "learning_rate": 0.00015549031521269136, + "loss": 1.8028, + "step": 5562 + }, + { + "epoch": 3.591349257585539, + "grad_norm": 1.3327660387854035, + "learning_rate": 0.0001554257516221297, + "loss": 1.5445, + "step": 5563 + }, + { + "epoch": 3.5919948353776627, + "grad_norm": 1.3781995750518403, + "learning_rate": 0.00015536118702503483, + "loss": 1.6111, + "step": 5564 + }, + { + "epoch": 3.592640413169787, + "grad_norm": 1.3652047077113594, + "learning_rate": 0.00015529662143338405, + "loss": 1.6639, + "step": 5565 + }, + { + "epoch": 3.593285990961911, + "grad_norm": 1.317982973254695, + "learning_rate": 0.000155232054859155, + "loss": 1.6479, + "step": 5566 + }, + { + "epoch": 3.593931568754035, + "grad_norm": 1.4134069674338055, + "learning_rate": 0.00015516748731432553, + "loss": 1.7199, + "step": 5567 + }, + { + "epoch": 3.594577146546159, + "grad_norm": 1.5341518306994568, + "learning_rate": 0.00015510291881087342, + "loss": 1.6783, + "step": 5568 + }, + { + "epoch": 3.595222724338283, + "grad_norm": 1.397604188799298, + "learning_rate": 0.00015503834936077694, + "loss": 1.7387, + "step": 5569 + }, + { + "epoch": 
3.5958683021304068, + "grad_norm": 1.3860464245266038, + "learning_rate": 0.00015497377897601432, + "loss": 1.5025, + "step": 5570 + }, + { + "epoch": 3.5965138799225307, + "grad_norm": 1.4504578440326739, + "learning_rate": 0.00015490920766856404, + "loss": 1.8373, + "step": 5571 + }, + { + "epoch": 3.5971594577146546, + "grad_norm": 1.240287863327571, + "learning_rate": 0.00015484463545040482, + "loss": 1.4353, + "step": 5572 + }, + { + "epoch": 3.5978050355067785, + "grad_norm": 1.320110305099365, + "learning_rate": 0.00015478006233351542, + "loss": 1.5374, + "step": 5573 + }, + { + "epoch": 3.5984506132989025, + "grad_norm": 1.359806303312331, + "learning_rate": 0.0001547154883298748, + "loss": 1.5764, + "step": 5574 + }, + { + "epoch": 3.5990961910910264, + "grad_norm": 1.2822022226634586, + "learning_rate": 0.00015465091345146214, + "loss": 1.5266, + "step": 5575 + }, + { + "epoch": 3.5997417688831503, + "grad_norm": 1.4559103717795319, + "learning_rate": 0.00015458633771025675, + "loss": 1.8538, + "step": 5576 + }, + { + "epoch": 3.6003873466752743, + "grad_norm": 1.4818757401287042, + "learning_rate": 0.00015452176111823806, + "loss": 1.9027, + "step": 5577 + }, + { + "epoch": 3.601032924467398, + "grad_norm": 1.500455924487847, + "learning_rate": 0.00015445718368738585, + "loss": 1.5937, + "step": 5578 + }, + { + "epoch": 3.601678502259522, + "grad_norm": 1.4451035677010162, + "learning_rate": 0.00015439260542967966, + "loss": 1.679, + "step": 5579 + }, + { + "epoch": 3.602324080051646, + "grad_norm": 1.3834866707060647, + "learning_rate": 0.00015432802635709963, + "loss": 1.7194, + "step": 5580 + }, + { + "epoch": 3.6029696578437704, + "grad_norm": 1.520058157246028, + "learning_rate": 0.00015426344648162578, + "loss": 1.6261, + "step": 5581 + }, + { + "epoch": 3.6036152356358944, + "grad_norm": 1.487872263010954, + "learning_rate": 0.00015419886581523827, + "loss": 1.7179, + "step": 5582 + }, + { + "epoch": 3.6042608134280183, + "grad_norm": 1.4350418599309511, + "learning_rate": 0.00015413428436991764, + "loss": 1.5792, + "step": 5583 + }, + { + "epoch": 3.6049063912201422, + "grad_norm": 1.4510542124624264, + "learning_rate": 0.0001540697021576443, + "loss": 1.5072, + "step": 5584 + }, + { + "epoch": 3.605551969012266, + "grad_norm": 1.388250637966276, + "learning_rate": 0.00015400511919039895, + "loss": 1.7909, + "step": 5585 + }, + { + "epoch": 3.60619754680439, + "grad_norm": 1.493025971497777, + "learning_rate": 0.00015394053548016244, + "loss": 1.7006, + "step": 5586 + }, + { + "epoch": 3.606843124596514, + "grad_norm": 1.4034774798007552, + "learning_rate": 0.00015387595103891566, + "loss": 1.7744, + "step": 5587 + }, + { + "epoch": 3.607488702388638, + "grad_norm": 1.3615202466488445, + "learning_rate": 0.00015381136587863973, + "loss": 1.564, + "step": 5588 + }, + { + "epoch": 3.608134280180762, + "grad_norm": 1.3992433215695124, + "learning_rate": 0.00015374678001131586, + "loss": 1.7153, + "step": 5589 + }, + { + "epoch": 3.608779857972886, + "grad_norm": 1.8301751456789483, + "learning_rate": 0.00015368219344892537, + "loss": 1.5326, + "step": 5590 + }, + { + "epoch": 3.6094254357650097, + "grad_norm": 1.390587438355313, + "learning_rate": 0.00015361760620344976, + "loss": 1.6424, + "step": 5591 + }, + { + "epoch": 3.6100710135571337, + "grad_norm": 1.3822783038498223, + "learning_rate": 0.0001535530182868707, + "loss": 1.6497, + "step": 5592 + }, + { + "epoch": 3.6107165913492576, + "grad_norm": 1.3846720111290192, + "learning_rate": 0.0001534884297111698, + 
"loss": 1.6516, + "step": 5593 + }, + { + "epoch": 3.6113621691413815, + "grad_norm": 1.5040640439382933, + "learning_rate": 0.000153423840488329, + "loss": 1.7061, + "step": 5594 + }, + { + "epoch": 3.6120077469335055, + "grad_norm": 1.4494046740185789, + "learning_rate": 0.00015335925063033023, + "loss": 1.6143, + "step": 5595 + }, + { + "epoch": 3.6126533247256294, + "grad_norm": 1.3414761672377318, + "learning_rate": 0.00015329466014915558, + "loss": 1.6207, + "step": 5596 + }, + { + "epoch": 3.6132989025177533, + "grad_norm": 1.4135525368138262, + "learning_rate": 0.00015323006905678733, + "loss": 1.7375, + "step": 5597 + }, + { + "epoch": 3.6139444803098772, + "grad_norm": 1.4223378571177807, + "learning_rate": 0.00015316547736520767, + "loss": 1.779, + "step": 5598 + }, + { + "epoch": 3.614590058102001, + "grad_norm": 1.4583278599586527, + "learning_rate": 0.00015310088508639912, + "loss": 1.7111, + "step": 5599 + }, + { + "epoch": 3.615235635894125, + "grad_norm": 1.349140833819992, + "learning_rate": 0.00015303629223234418, + "loss": 1.6298, + "step": 5600 + }, + { + "epoch": 3.615881213686249, + "grad_norm": 1.3720121943244874, + "learning_rate": 0.00015297169881502552, + "loss": 1.6209, + "step": 5601 + }, + { + "epoch": 3.616526791478373, + "grad_norm": 1.504630499507019, + "learning_rate": 0.00015290710484642582, + "loss": 1.7254, + "step": 5602 + }, + { + "epoch": 3.617172369270497, + "grad_norm": 1.4432918135368031, + "learning_rate": 0.00015284251033852807, + "loss": 1.5344, + "step": 5603 + }, + { + "epoch": 3.617817947062621, + "grad_norm": 1.5688808982356541, + "learning_rate": 0.00015277791530331507, + "loss": 1.7952, + "step": 5604 + }, + { + "epoch": 3.6184635248547448, + "grad_norm": 1.4165100989783856, + "learning_rate": 0.00015271331975276996, + "loss": 1.6221, + "step": 5605 + }, + { + "epoch": 3.6191091026468687, + "grad_norm": 1.4010032782426824, + "learning_rate": 0.00015264872369887588, + "loss": 1.6712, + "step": 5606 + }, + { + "epoch": 3.6197546804389926, + "grad_norm": 1.4505056354089003, + "learning_rate": 0.00015258412715361607, + "loss": 1.7712, + "step": 5607 + }, + { + "epoch": 3.620400258231117, + "grad_norm": 1.5500732572589764, + "learning_rate": 0.0001525195301289738, + "loss": 1.8214, + "step": 5608 + }, + { + "epoch": 3.621045836023241, + "grad_norm": 1.5771882434378364, + "learning_rate": 0.00015245493263693255, + "loss": 1.7829, + "step": 5609 + }, + { + "epoch": 3.621691413815365, + "grad_norm": 1.5177517311329374, + "learning_rate": 0.00015239033468947584, + "loss": 1.7271, + "step": 5610 + }, + { + "epoch": 3.622336991607489, + "grad_norm": 1.4370704232471598, + "learning_rate": 0.0001523257362985872, + "loss": 1.6343, + "step": 5611 + }, + { + "epoch": 3.6229825693996127, + "grad_norm": 1.3549856418601887, + "learning_rate": 0.00015226113747625033, + "loss": 1.5943, + "step": 5612 + }, + { + "epoch": 3.6236281471917366, + "grad_norm": 1.3621232714048837, + "learning_rate": 0.00015219653823444899, + "loss": 1.5048, + "step": 5613 + }, + { + "epoch": 3.6242737249838606, + "grad_norm": 1.453863021513823, + "learning_rate": 0.000152131938585167, + "loss": 1.5979, + "step": 5614 + }, + { + "epoch": 3.6249193027759845, + "grad_norm": 1.3448690376210297, + "learning_rate": 0.00015206733854038827, + "loss": 1.644, + "step": 5615 + }, + { + "epoch": 3.6255648805681084, + "grad_norm": 1.3996152037674174, + "learning_rate": 0.0001520027381120967, + "loss": 1.8105, + "step": 5616 + }, + { + "epoch": 3.6262104583602324, + "grad_norm": 
1.5446977531297836, + "learning_rate": 0.00015193813731227657, + "loss": 1.574, + "step": 5617 + }, + { + "epoch": 3.6268560361523563, + "grad_norm": 1.4259278422541772, + "learning_rate": 0.00015187353615291174, + "loss": 1.6278, + "step": 5618 + }, + { + "epoch": 3.6275016139444802, + "grad_norm": 1.317737099283661, + "learning_rate": 0.00015180893464598652, + "loss": 1.454, + "step": 5619 + }, + { + "epoch": 3.628147191736604, + "grad_norm": 1.618273474739503, + "learning_rate": 0.00015174433280348512, + "loss": 1.7793, + "step": 5620 + }, + { + "epoch": 3.628792769528728, + "grad_norm": 1.7948871847058852, + "learning_rate": 0.00015167973063739192, + "loss": 1.7664, + "step": 5621 + }, + { + "epoch": 3.629438347320852, + "grad_norm": 1.5049358098423529, + "learning_rate": 0.00015161512815969124, + "loss": 1.5964, + "step": 5622 + }, + { + "epoch": 3.630083925112976, + "grad_norm": 1.4268833698709684, + "learning_rate": 0.0001515505253823675, + "loss": 1.6469, + "step": 5623 + }, + { + "epoch": 3.6307295029051003, + "grad_norm": 1.5034862740038522, + "learning_rate": 0.0001514859223174052, + "loss": 1.6419, + "step": 5624 + }, + { + "epoch": 3.6313750806972243, + "grad_norm": 1.47346359070444, + "learning_rate": 0.0001514213189767889, + "loss": 1.7224, + "step": 5625 + }, + { + "epoch": 3.632020658489348, + "grad_norm": 1.5397354589487455, + "learning_rate": 0.00015135671537250318, + "loss": 1.8338, + "step": 5626 + }, + { + "epoch": 3.632666236281472, + "grad_norm": 1.5375944566766617, + "learning_rate": 0.00015129211151653264, + "loss": 1.7155, + "step": 5627 + }, + { + "epoch": 3.633311814073596, + "grad_norm": 1.5226087762723317, + "learning_rate": 0.00015122750742086204, + "loss": 1.9741, + "step": 5628 + }, + { + "epoch": 3.63395739186572, + "grad_norm": 1.4975025606282069, + "learning_rate": 0.00015116290309747602, + "loss": 1.8462, + "step": 5629 + }, + { + "epoch": 3.634602969657844, + "grad_norm": 1.4730795432839527, + "learning_rate": 0.00015109829855835941, + "loss": 1.6994, + "step": 5630 + }, + { + "epoch": 3.635248547449968, + "grad_norm": 1.4953379344390925, + "learning_rate": 0.00015103369381549704, + "loss": 1.6649, + "step": 5631 + }, + { + "epoch": 3.6358941252420918, + "grad_norm": 1.3995632837140772, + "learning_rate": 0.00015096908888087373, + "loss": 1.5738, + "step": 5632 + }, + { + "epoch": 3.6365397030342157, + "grad_norm": 1.3772331919396648, + "learning_rate": 0.00015090448376647435, + "loss": 1.6293, + "step": 5633 + }, + { + "epoch": 3.6371852808263396, + "grad_norm": 1.368096410612515, + "learning_rate": 0.00015083987848428388, + "loss": 1.6493, + "step": 5634 + }, + { + "epoch": 3.6378308586184636, + "grad_norm": 1.5609996072329193, + "learning_rate": 0.0001507752730462872, + "loss": 1.7169, + "step": 5635 + }, + { + "epoch": 3.6384764364105875, + "grad_norm": 1.4931591991399322, + "learning_rate": 0.00015071066746446933, + "loss": 1.4679, + "step": 5636 + }, + { + "epoch": 3.6391220142027114, + "grad_norm": 1.4614379787626428, + "learning_rate": 0.00015064606175081528, + "loss": 1.6839, + "step": 5637 + }, + { + "epoch": 3.6397675919948353, + "grad_norm": 1.5597794859465421, + "learning_rate": 0.00015058145591731006, + "loss": 1.6871, + "step": 5638 + }, + { + "epoch": 3.6404131697869593, + "grad_norm": 1.415627511486125, + "learning_rate": 0.00015051684997593873, + "loss": 1.6182, + "step": 5639 + }, + { + "epoch": 3.641058747579083, + "grad_norm": 1.480606763245759, + "learning_rate": 0.00015045224393868634, + "loss": 1.4462, + "step": 5640 + }, + { + 
"epoch": 3.641704325371207, + "grad_norm": 1.5353412890150124, + "learning_rate": 0.000150387637817538, + "loss": 1.6983, + "step": 5641 + }, + { + "epoch": 3.642349903163331, + "grad_norm": 1.6085538158367783, + "learning_rate": 0.00015032303162447886, + "loss": 1.9859, + "step": 5642 + }, + { + "epoch": 3.642995480955455, + "grad_norm": 1.8532941837390218, + "learning_rate": 0.00015025842537149398, + "loss": 1.6008, + "step": 5643 + }, + { + "epoch": 3.643641058747579, + "grad_norm": 1.4576412601542563, + "learning_rate": 0.0001501938190705685, + "loss": 1.6287, + "step": 5644 + }, + { + "epoch": 3.644286636539703, + "grad_norm": 1.5646478653417522, + "learning_rate": 0.0001501292127336876, + "loss": 1.6589, + "step": 5645 + }, + { + "epoch": 3.644932214331827, + "grad_norm": 1.455510928447438, + "learning_rate": 0.00015006460637283637, + "loss": 1.4454, + "step": 5646 + }, + { + "epoch": 3.6455777921239507, + "grad_norm": 1.4455466962254329, + "learning_rate": 0.00015, + "loss": 1.6018, + "step": 5647 + }, + { + "epoch": 3.6462233699160747, + "grad_norm": 1.3801537987814103, + "learning_rate": 0.0001499353936271636, + "loss": 1.664, + "step": 5648 + }, + { + "epoch": 3.6468689477081986, + "grad_norm": 1.38779033715445, + "learning_rate": 0.00014987078726631242, + "loss": 1.5434, + "step": 5649 + }, + { + "epoch": 3.6475145255003225, + "grad_norm": 1.4941564013527528, + "learning_rate": 0.00014980618092943148, + "loss": 1.8017, + "step": 5650 + }, + { + "epoch": 3.648160103292447, + "grad_norm": 1.4063473518704546, + "learning_rate": 0.000149741574628506, + "loss": 1.7136, + "step": 5651 + }, + { + "epoch": 3.648805681084571, + "grad_norm": 1.4185772118855204, + "learning_rate": 0.00014967696837552116, + "loss": 1.6725, + "step": 5652 + }, + { + "epoch": 3.6494512588766947, + "grad_norm": 1.472243935334385, + "learning_rate": 0.000149612362182462, + "loss": 1.865, + "step": 5653 + }, + { + "epoch": 3.6500968366688187, + "grad_norm": 1.3948406590579472, + "learning_rate": 0.00014954775606131364, + "loss": 1.4778, + "step": 5654 + }, + { + "epoch": 3.6507424144609426, + "grad_norm": 1.4747103384168663, + "learning_rate": 0.0001494831500240613, + "loss": 1.8792, + "step": 5655 + }, + { + "epoch": 3.6513879922530665, + "grad_norm": 1.5284977053178235, + "learning_rate": 0.00014941854408268994, + "loss": 1.9228, + "step": 5656 + }, + { + "epoch": 3.6520335700451905, + "grad_norm": 1.407204062016827, + "learning_rate": 0.00014935393824918472, + "loss": 1.6913, + "step": 5657 + }, + { + "epoch": 3.6526791478373144, + "grad_norm": 1.4266572525535883, + "learning_rate": 0.00014928933253553067, + "loss": 1.6409, + "step": 5658 + }, + { + "epoch": 3.6533247256294383, + "grad_norm": 1.3385994743983034, + "learning_rate": 0.0001492247269537128, + "loss": 1.6739, + "step": 5659 + }, + { + "epoch": 3.6539703034215623, + "grad_norm": 1.5085516951444913, + "learning_rate": 0.00014916012151571612, + "loss": 1.8633, + "step": 5660 + }, + { + "epoch": 3.654615881213686, + "grad_norm": 1.3651696836093503, + "learning_rate": 0.0001490955162335256, + "loss": 1.6617, + "step": 5661 + }, + { + "epoch": 3.65526145900581, + "grad_norm": 1.2578628538499976, + "learning_rate": 0.00014903091111912627, + "loss": 1.4137, + "step": 5662 + }, + { + "epoch": 3.655907036797934, + "grad_norm": 1.53583185630444, + "learning_rate": 0.00014896630618450296, + "loss": 1.7132, + "step": 5663 + }, + { + "epoch": 3.656552614590058, + "grad_norm": 1.4456218671315566, + "learning_rate": 0.00014890170144164053, + "loss": 1.6548, + 
"step": 5664 + }, + { + "epoch": 3.657198192382182, + "grad_norm": 1.3527461928818334, + "learning_rate": 0.00014883709690252398, + "loss": 1.4111, + "step": 5665 + }, + { + "epoch": 3.657843770174306, + "grad_norm": 1.3510376805247128, + "learning_rate": 0.00014877249257913796, + "loss": 1.488, + "step": 5666 + }, + { + "epoch": 3.65848934796643, + "grad_norm": 1.5914990871088002, + "learning_rate": 0.00014870788848346734, + "loss": 1.6428, + "step": 5667 + }, + { + "epoch": 3.659134925758554, + "grad_norm": 1.3568394575126135, + "learning_rate": 0.00014864328462749684, + "loss": 1.5736, + "step": 5668 + }, + { + "epoch": 3.659780503550678, + "grad_norm": 1.4861613550482717, + "learning_rate": 0.0001485786810232111, + "loss": 1.7227, + "step": 5669 + }, + { + "epoch": 3.660426081342802, + "grad_norm": 1.3236995862573764, + "learning_rate": 0.0001485140776825948, + "loss": 1.6533, + "step": 5670 + }, + { + "epoch": 3.661071659134926, + "grad_norm": 1.564413873217063, + "learning_rate": 0.00014844947461763254, + "loss": 1.6743, + "step": 5671 + }, + { + "epoch": 3.66171723692705, + "grad_norm": 1.29908555511057, + "learning_rate": 0.00014838487184030876, + "loss": 1.3753, + "step": 5672 + }, + { + "epoch": 3.662362814719174, + "grad_norm": 1.4945952305896522, + "learning_rate": 0.00014832026936260803, + "loss": 1.8515, + "step": 5673 + }, + { + "epoch": 3.6630083925112977, + "grad_norm": 1.5521970472492312, + "learning_rate": 0.00014825566719651486, + "loss": 1.8686, + "step": 5674 + }, + { + "epoch": 3.6636539703034217, + "grad_norm": 1.5686025622531026, + "learning_rate": 0.00014819106535401348, + "loss": 1.6856, + "step": 5675 + }, + { + "epoch": 3.6642995480955456, + "grad_norm": 1.5085608597201705, + "learning_rate": 0.00014812646384708823, + "loss": 1.9269, + "step": 5676 + }, + { + "epoch": 3.6649451258876695, + "grad_norm": 1.4366825003084622, + "learning_rate": 0.00014806186268772345, + "loss": 1.4474, + "step": 5677 + }, + { + "epoch": 3.6655907036797934, + "grad_norm": 1.4164212451572062, + "learning_rate": 0.00014799726188790326, + "loss": 1.7094, + "step": 5678 + }, + { + "epoch": 3.6662362814719174, + "grad_norm": 1.4159781749344575, + "learning_rate": 0.00014793266145961173, + "loss": 1.862, + "step": 5679 + }, + { + "epoch": 3.6668818592640413, + "grad_norm": 1.3604121503123332, + "learning_rate": 0.00014786806141483302, + "loss": 1.605, + "step": 5680 + }, + { + "epoch": 3.6675274370561652, + "grad_norm": 1.463165978680446, + "learning_rate": 0.000147803461765551, + "loss": 1.5867, + "step": 5681 + }, + { + "epoch": 3.668173014848289, + "grad_norm": 1.3620266827587115, + "learning_rate": 0.00014773886252374968, + "loss": 1.6416, + "step": 5682 + }, + { + "epoch": 3.668818592640413, + "grad_norm": 1.3313630257526607, + "learning_rate": 0.00014767426370141282, + "loss": 1.6288, + "step": 5683 + }, + { + "epoch": 3.669464170432537, + "grad_norm": 1.6908047827158308, + "learning_rate": 0.00014760966531052414, + "loss": 1.8229, + "step": 5684 + }, + { + "epoch": 3.670109748224661, + "grad_norm": 1.241470625191328, + "learning_rate": 0.00014754506736306742, + "loss": 1.4048, + "step": 5685 + }, + { + "epoch": 3.670755326016785, + "grad_norm": 1.5386221054935736, + "learning_rate": 0.00014748046987102617, + "loss": 1.5926, + "step": 5686 + }, + { + "epoch": 3.671400903808909, + "grad_norm": 1.7070811553700374, + "learning_rate": 0.00014741587284638395, + "loss": 1.923, + "step": 5687 + }, + { + "epoch": 3.6720464816010328, + "grad_norm": 1.6159571346022004, + "learning_rate": 
0.0001473512763011241, + "loss": 2.1312, + "step": 5688 + }, + { + "epoch": 3.6726920593931567, + "grad_norm": 1.4514896185524155, + "learning_rate": 0.00014728668024723, + "loss": 1.7346, + "step": 5689 + }, + { + "epoch": 3.6733376371852806, + "grad_norm": 1.3885060216516554, + "learning_rate": 0.00014722208469668494, + "loss": 1.7042, + "step": 5690 + }, + { + "epoch": 3.6739832149774045, + "grad_norm": 1.4170506300755727, + "learning_rate": 0.00014715748966147193, + "loss": 1.7405, + "step": 5691 + }, + { + "epoch": 3.6746287927695285, + "grad_norm": 1.3094988929578018, + "learning_rate": 0.00014709289515357416, + "loss": 1.5449, + "step": 5692 + }, + { + "epoch": 3.6752743705616524, + "grad_norm": 1.5140640766941003, + "learning_rate": 0.00014702830118497453, + "loss": 1.4061, + "step": 5693 + }, + { + "epoch": 3.675919948353777, + "grad_norm": 1.4673037954261512, + "learning_rate": 0.00014696370776765583, + "loss": 1.743, + "step": 5694 + }, + { + "epoch": 3.6765655261459007, + "grad_norm": 1.461167351511691, + "learning_rate": 0.00014689911491360088, + "loss": 1.8662, + "step": 5695 + }, + { + "epoch": 3.6772111039380246, + "grad_norm": 1.2994147610384388, + "learning_rate": 0.00014683452263479236, + "loss": 1.6593, + "step": 5696 + }, + { + "epoch": 3.6778566817301486, + "grad_norm": 1.380468145147164, + "learning_rate": 0.0001467699309432127, + "loss": 1.7375, + "step": 5697 + }, + { + "epoch": 3.6785022595222725, + "grad_norm": 1.3175885650449497, + "learning_rate": 0.00014670533985084434, + "loss": 1.5454, + "step": 5698 + }, + { + "epoch": 3.6791478373143964, + "grad_norm": 1.3639138155401804, + "learning_rate": 0.00014664074936966977, + "loss": 1.5401, + "step": 5699 + }, + { + "epoch": 3.6797934151065204, + "grad_norm": 1.3570850405124, + "learning_rate": 0.000146576159511671, + "loss": 1.6627, + "step": 5700 + }, + { + "epoch": 3.6804389928986443, + "grad_norm": 1.5147168867607939, + "learning_rate": 0.00014651157028883015, + "loss": 1.9075, + "step": 5701 + }, + { + "epoch": 3.681084570690768, + "grad_norm": 1.5870454500539433, + "learning_rate": 0.00014644698171312933, + "loss": 1.8032, + "step": 5702 + }, + { + "epoch": 3.681730148482892, + "grad_norm": 1.3236403665052776, + "learning_rate": 0.00014638239379655021, + "loss": 1.527, + "step": 5703 + }, + { + "epoch": 3.682375726275016, + "grad_norm": 1.3800557211361222, + "learning_rate": 0.0001463178065510746, + "loss": 1.5901, + "step": 5704 + }, + { + "epoch": 3.68302130406714, + "grad_norm": 1.4663806013961287, + "learning_rate": 0.00014625321998868417, + "loss": 1.8345, + "step": 5705 + }, + { + "epoch": 3.683666881859264, + "grad_norm": 1.3216282952617873, + "learning_rate": 0.00014618863412136027, + "loss": 1.5974, + "step": 5706 + }, + { + "epoch": 3.684312459651388, + "grad_norm": 1.5331776688283667, + "learning_rate": 0.00014612404896108432, + "loss": 1.6457, + "step": 5707 + }, + { + "epoch": 3.684958037443512, + "grad_norm": 1.5843358035918458, + "learning_rate": 0.00014605946451983759, + "loss": 1.506, + "step": 5708 + }, + { + "epoch": 3.6856036152356357, + "grad_norm": 1.543604258855849, + "learning_rate": 0.00014599488080960102, + "loss": 1.9357, + "step": 5709 + }, + { + "epoch": 3.68624919302776, + "grad_norm": 1.4586012478648136, + "learning_rate": 0.00014593029784235569, + "loss": 1.7417, + "step": 5710 + }, + { + "epoch": 3.686894770819884, + "grad_norm": 1.4623114244147317, + "learning_rate": 0.0001458657156300823, + "loss": 1.782, + "step": 5711 + }, + { + "epoch": 3.687540348612008, + "grad_norm": 
1.5839042934393546, + "learning_rate": 0.0001458011341847617, + "loss": 1.8299, + "step": 5712 + }, + { + "epoch": 3.688185926404132, + "grad_norm": 1.3899463203457294, + "learning_rate": 0.0001457365535183742, + "loss": 1.3964, + "step": 5713 + }, + { + "epoch": 3.688831504196256, + "grad_norm": 1.4977331000083085, + "learning_rate": 0.00014567197364290035, + "loss": 1.6884, + "step": 5714 + }, + { + "epoch": 3.6894770819883798, + "grad_norm": 1.1999775643197896, + "learning_rate": 0.00014560739457032034, + "loss": 1.4674, + "step": 5715 + }, + { + "epoch": 3.6901226597805037, + "grad_norm": 1.7961727910308987, + "learning_rate": 0.00014554281631261415, + "loss": 1.6929, + "step": 5716 + }, + { + "epoch": 3.6907682375726276, + "grad_norm": 1.5762540214813436, + "learning_rate": 0.00014547823888176188, + "loss": 1.9256, + "step": 5717 + }, + { + "epoch": 3.6914138153647515, + "grad_norm": 1.3956508916285784, + "learning_rate": 0.00014541366228974326, + "loss": 1.5964, + "step": 5718 + }, + { + "epoch": 3.6920593931568755, + "grad_norm": 1.4748952819016754, + "learning_rate": 0.00014534908654853783, + "loss": 1.7752, + "step": 5719 + }, + { + "epoch": 3.6927049709489994, + "grad_norm": 1.4487872828631467, + "learning_rate": 0.00014528451167012521, + "loss": 1.658, + "step": 5720 + }, + { + "epoch": 3.6933505487411233, + "grad_norm": 1.3221289332912085, + "learning_rate": 0.00014521993766648464, + "loss": 1.2868, + "step": 5721 + }, + { + "epoch": 3.6939961265332473, + "grad_norm": 1.3861874364267497, + "learning_rate": 0.00014515536454959518, + "loss": 1.6048, + "step": 5722 + }, + { + "epoch": 3.694641704325371, + "grad_norm": 1.4730950543845658, + "learning_rate": 0.0001450907923314359, + "loss": 1.8236, + "step": 5723 + }, + { + "epoch": 3.695287282117495, + "grad_norm": 1.4617083729905125, + "learning_rate": 0.00014502622102398568, + "loss": 1.5214, + "step": 5724 + }, + { + "epoch": 3.695932859909619, + "grad_norm": 1.460187137422759, + "learning_rate": 0.0001449616506392231, + "loss": 1.7626, + "step": 5725 + }, + { + "epoch": 3.696578437701743, + "grad_norm": 1.3044045695154851, + "learning_rate": 0.00014489708118912655, + "loss": 1.3627, + "step": 5726 + }, + { + "epoch": 3.697224015493867, + "grad_norm": 1.3954379805365504, + "learning_rate": 0.00014483251268567453, + "loss": 1.6671, + "step": 5727 + }, + { + "epoch": 3.697869593285991, + "grad_norm": 1.3269945271039951, + "learning_rate": 0.000144767945140845, + "loss": 1.5397, + "step": 5728 + }, + { + "epoch": 3.698515171078115, + "grad_norm": 1.5040534764541167, + "learning_rate": 0.00014470337856661592, + "loss": 1.8168, + "step": 5729 + }, + { + "epoch": 3.6991607488702387, + "grad_norm": 1.4769609147645233, + "learning_rate": 0.0001446388129749652, + "loss": 1.6967, + "step": 5730 + }, + { + "epoch": 3.6998063266623626, + "grad_norm": 1.4370173156673887, + "learning_rate": 0.00014457424837787026, + "loss": 1.673, + "step": 5731 + }, + { + "epoch": 3.7004519044544866, + "grad_norm": 1.5833671672235776, + "learning_rate": 0.00014450968478730864, + "loss": 1.8941, + "step": 5732 + }, + { + "epoch": 3.7010974822466105, + "grad_norm": 1.6689648394667058, + "learning_rate": 0.0001444451222152575, + "loss": 1.6863, + "step": 5733 + }, + { + "epoch": 3.7017430600387344, + "grad_norm": 1.585375926078437, + "learning_rate": 0.00014438056067369378, + "loss": 1.768, + "step": 5734 + }, + { + "epoch": 3.7023886378308584, + "grad_norm": 1.4243913116751705, + "learning_rate": 0.00014431600017459446, + "loss": 1.7356, + "step": 5735 + }, + 
{ + "epoch": 3.7030342156229823, + "grad_norm": 1.5484800002077361, + "learning_rate": 0.00014425144072993608, + "loss": 1.6101, + "step": 5736 + }, + { + "epoch": 3.7036797934151067, + "grad_norm": 1.3283385370384684, + "learning_rate": 0.00014418688235169519, + "loss": 1.6678, + "step": 5737 + }, + { + "epoch": 3.7043253712072306, + "grad_norm": 1.6174079585408108, + "learning_rate": 0.00014412232505184793, + "loss": 1.9041, + "step": 5738 + }, + { + "epoch": 3.7049709489993545, + "grad_norm": 1.5523018582377646, + "learning_rate": 0.00014405776884237042, + "loss": 1.9035, + "step": 5739 + }, + { + "epoch": 3.7056165267914785, + "grad_norm": 1.4066335001931916, + "learning_rate": 0.0001439932137352385, + "loss": 1.6401, + "step": 5740 + }, + { + "epoch": 3.7062621045836024, + "grad_norm": 1.3109161045300852, + "learning_rate": 0.00014392865974242774, + "loss": 1.4623, + "step": 5741 + }, + { + "epoch": 3.7069076823757263, + "grad_norm": 1.5734356675928203, + "learning_rate": 0.00014386410687591367, + "loss": 1.7926, + "step": 5742 + }, + { + "epoch": 3.7075532601678503, + "grad_norm": 1.4333070883566357, + "learning_rate": 0.00014379955514767152, + "loss": 1.7525, + "step": 5743 + }, + { + "epoch": 3.708198837959974, + "grad_norm": 1.5107596291546856, + "learning_rate": 0.00014373500456967619, + "loss": 1.7831, + "step": 5744 + }, + { + "epoch": 3.708844415752098, + "grad_norm": 1.3950179294222795, + "learning_rate": 0.0001436704551539026, + "loss": 1.6227, + "step": 5745 + }, + { + "epoch": 3.709489993544222, + "grad_norm": 1.4453535450385284, + "learning_rate": 0.0001436059069123253, + "loss": 1.4782, + "step": 5746 + }, + { + "epoch": 3.710135571336346, + "grad_norm": 1.3559335681048839, + "learning_rate": 0.0001435413598569186, + "loss": 1.6683, + "step": 5747 + }, + { + "epoch": 3.71078114912847, + "grad_norm": 1.536351164894177, + "learning_rate": 0.00014347681399965673, + "loss": 1.8836, + "step": 5748 + }, + { + "epoch": 3.711426726920594, + "grad_norm": 1.5096456568031296, + "learning_rate": 0.00014341226935251364, + "loss": 1.5797, + "step": 5749 + }, + { + "epoch": 3.7120723047127178, + "grad_norm": 1.4035446877657147, + "learning_rate": 0.00014334772592746297, + "loss": 1.7161, + "step": 5750 + }, + { + "epoch": 3.7127178825048417, + "grad_norm": 1.372215142322991, + "learning_rate": 0.00014328318373647814, + "loss": 1.4845, + "step": 5751 + }, + { + "epoch": 3.7133634602969656, + "grad_norm": 1.3920614753707143, + "learning_rate": 0.00014321864279153254, + "loss": 1.6559, + "step": 5752 + }, + { + "epoch": 3.71400903808909, + "grad_norm": 1.3142406550940542, + "learning_rate": 0.00014315410310459907, + "loss": 1.4187, + "step": 5753 + }, + { + "epoch": 3.714654615881214, + "grad_norm": 1.5308516012271114, + "learning_rate": 0.00014308956468765053, + "loss": 1.7177, + "step": 5754 + }, + { + "epoch": 3.715300193673338, + "grad_norm": 1.4306591598975147, + "learning_rate": 0.00014302502755265956, + "loss": 1.5849, + "step": 5755 + }, + { + "epoch": 3.715945771465462, + "grad_norm": 1.5406010191971933, + "learning_rate": 0.00014296049171159833, + "loss": 1.7092, + "step": 5756 + }, + { + "epoch": 3.7165913492575857, + "grad_norm": 1.2441052313068517, + "learning_rate": 0.00014289595717643897, + "loss": 1.5501, + "step": 5757 + }, + { + "epoch": 3.7172369270497096, + "grad_norm": 1.3576575895534964, + "learning_rate": 0.00014283142395915338, + "loss": 1.7625, + "step": 5758 + }, + { + "epoch": 3.7178825048418336, + "grad_norm": 1.3841610141770504, + "learning_rate": 
0.000142766892071713, + "loss": 1.7928, + "step": 5759 + }, + { + "epoch": 3.7185280826339575, + "grad_norm": 1.4811966448061222, + "learning_rate": 0.00014270236152608926, + "loss": 1.678, + "step": 5760 + }, + { + "epoch": 3.7191736604260814, + "grad_norm": 1.4424135620891625, + "learning_rate": 0.0001426378323342532, + "loss": 1.6422, + "step": 5761 + }, + { + "epoch": 3.7198192382182054, + "grad_norm": 1.4631984942935128, + "learning_rate": 0.00014257330450817576, + "loss": 1.6695, + "step": 5762 + }, + { + "epoch": 3.7204648160103293, + "grad_norm": 1.6366621236066055, + "learning_rate": 0.00014250877805982735, + "loss": 1.7596, + "step": 5763 + }, + { + "epoch": 3.7211103938024532, + "grad_norm": 1.3747850754195197, + "learning_rate": 0.00014244425300117845, + "loss": 1.6962, + "step": 5764 + }, + { + "epoch": 3.721755971594577, + "grad_norm": 1.4345703730481827, + "learning_rate": 0.00014237972934419906, + "loss": 1.7452, + "step": 5765 + }, + { + "epoch": 3.722401549386701, + "grad_norm": 1.3884932532214294, + "learning_rate": 0.00014231520710085896, + "loss": 1.6696, + "step": 5766 + }, + { + "epoch": 3.723047127178825, + "grad_norm": 1.5535837562544108, + "learning_rate": 0.00014225068628312773, + "loss": 1.6345, + "step": 5767 + }, + { + "epoch": 3.723692704970949, + "grad_norm": 1.7103537232000492, + "learning_rate": 0.00014218616690297471, + "loss": 1.7723, + "step": 5768 + }, + { + "epoch": 3.724338282763073, + "grad_norm": 1.2227771844833368, + "learning_rate": 0.00014212164897236878, + "loss": 1.3599, + "step": 5769 + }, + { + "epoch": 3.724983860555197, + "grad_norm": 1.478440424215254, + "learning_rate": 0.00014205713250327881, + "loss": 1.6987, + "step": 5770 + }, + { + "epoch": 3.7256294383473207, + "grad_norm": 1.4560908733290139, + "learning_rate": 0.00014199261750767324, + "loss": 1.5813, + "step": 5771 + }, + { + "epoch": 3.7262750161394447, + "grad_norm": 1.256011524306493, + "learning_rate": 0.0001419281039975202, + "loss": 1.4312, + "step": 5772 + }, + { + "epoch": 3.7269205939315686, + "grad_norm": 1.7416753516797103, + "learning_rate": 0.00014186359198478768, + "loss": 1.7337, + "step": 5773 + }, + { + "epoch": 3.7275661717236925, + "grad_norm": 1.455104417740764, + "learning_rate": 0.00014179908148144338, + "loss": 1.6613, + "step": 5774 + }, + { + "epoch": 3.7282117495158165, + "grad_norm": 1.4385167962112486, + "learning_rate": 0.00014173457249945458, + "loss": 1.7012, + "step": 5775 + }, + { + "epoch": 3.7288573273079404, + "grad_norm": 1.4767090411115036, + "learning_rate": 0.00014167006505078837, + "loss": 1.6805, + "step": 5776 + }, + { + "epoch": 3.7295029051000643, + "grad_norm": 1.4393476927642443, + "learning_rate": 0.00014160555914741164, + "loss": 1.6807, + "step": 5777 + }, + { + "epoch": 3.7301484828921883, + "grad_norm": 1.4979005319212317, + "learning_rate": 0.0001415410548012908, + "loss": 1.5112, + "step": 5778 + }, + { + "epoch": 3.730794060684312, + "grad_norm": 1.3782795272665878, + "learning_rate": 0.0001414765520243921, + "loss": 1.665, + "step": 5779 + }, + { + "epoch": 3.7314396384764366, + "grad_norm": 1.3437596236261158, + "learning_rate": 0.00014141205082868154, + "loss": 1.4986, + "step": 5780 + }, + { + "epoch": 3.7320852162685605, + "grad_norm": 1.422390521056073, + "learning_rate": 0.00014134755122612467, + "loss": 1.721, + "step": 5781 + }, + { + "epoch": 3.7327307940606844, + "grad_norm": 1.5076247622130265, + "learning_rate": 0.00014128305322868692, + "loss": 1.6481, + "step": 5782 + }, + { + "epoch": 3.7333763718528084, + 
"grad_norm": 1.3960037695559324, + "learning_rate": 0.00014121855684833332, + "loss": 1.5755, + "step": 5783 + }, + { + "epoch": 3.7340219496449323, + "grad_norm": 1.5738352461059075, + "learning_rate": 0.0001411540620970285, + "loss": 1.7718, + "step": 5784 + }, + { + "epoch": 3.734667527437056, + "grad_norm": 1.6422082175763877, + "learning_rate": 0.0001410895689867371, + "loss": 1.6066, + "step": 5785 + }, + { + "epoch": 3.73531310522918, + "grad_norm": 1.3128392586066349, + "learning_rate": 0.00014102507752942307, + "loss": 1.3178, + "step": 5786 + }, + { + "epoch": 3.735958683021304, + "grad_norm": 1.5062394147806253, + "learning_rate": 0.00014096058773705044, + "loss": 1.5829, + "step": 5787 + }, + { + "epoch": 3.736604260813428, + "grad_norm": 1.598551952533611, + "learning_rate": 0.00014089609962158258, + "loss": 1.6917, + "step": 5788 + }, + { + "epoch": 3.737249838605552, + "grad_norm": 1.4685363948047792, + "learning_rate": 0.00014083161319498275, + "loss": 1.6201, + "step": 5789 + }, + { + "epoch": 3.737895416397676, + "grad_norm": 1.7029877059683114, + "learning_rate": 0.00014076712846921395, + "loss": 1.8504, + "step": 5790 + }, + { + "epoch": 3.7385409941898, + "grad_norm": 1.6172509573283982, + "learning_rate": 0.0001407026454562386, + "loss": 1.8732, + "step": 5791 + }, + { + "epoch": 3.7391865719819237, + "grad_norm": 1.4703432726684011, + "learning_rate": 0.00014063816416801905, + "loss": 1.8068, + "step": 5792 + }, + { + "epoch": 3.7398321497740477, + "grad_norm": 1.4467646020804892, + "learning_rate": 0.0001405736846165173, + "loss": 1.7392, + "step": 5793 + }, + { + "epoch": 3.7404777275661716, + "grad_norm": 1.5407284665467667, + "learning_rate": 0.00014050920681369484, + "loss": 1.6522, + "step": 5794 + }, + { + "epoch": 3.7411233053582955, + "grad_norm": 1.4608691574471506, + "learning_rate": 0.0001404447307715131, + "loss": 1.4184, + "step": 5795 + }, + { + "epoch": 3.74176888315042, + "grad_norm": 1.565690913159996, + "learning_rate": 0.000140380256501933, + "loss": 1.6974, + "step": 5796 + }, + { + "epoch": 3.742414460942544, + "grad_norm": 1.4303134127516741, + "learning_rate": 0.0001403157840169152, + "loss": 1.6635, + "step": 5797 + }, + { + "epoch": 3.7430600387346677, + "grad_norm": 1.5183969547803655, + "learning_rate": 0.00014025131332841997, + "loss": 1.7613, + "step": 5798 + }, + { + "epoch": 3.7437056165267917, + "grad_norm": 1.438728986974956, + "learning_rate": 0.0001401868444484074, + "loss": 1.6473, + "step": 5799 + }, + { + "epoch": 3.7443511943189156, + "grad_norm": 1.4078022449568617, + "learning_rate": 0.00014012237738883704, + "loss": 1.6656, + "step": 5800 + }, + { + "epoch": 3.7449967721110395, + "grad_norm": 1.4057230178940132, + "learning_rate": 0.0001400579121616682, + "loss": 1.733, + "step": 5801 + }, + { + "epoch": 3.7456423499031635, + "grad_norm": 1.4292845220729864, + "learning_rate": 0.00013999344877885995, + "loss": 1.8742, + "step": 5802 + }, + { + "epoch": 3.7462879276952874, + "grad_norm": 1.5853248992074807, + "learning_rate": 0.0001399289872523708, + "loss": 1.8692, + "step": 5803 + }, + { + "epoch": 3.7469335054874113, + "grad_norm": 1.5249734352453168, + "learning_rate": 0.00013986452759415905, + "loss": 1.7866, + "step": 5804 + }, + { + "epoch": 3.7475790832795353, + "grad_norm": 1.3899829400112145, + "learning_rate": 0.00013980006981618274, + "loss": 1.6158, + "step": 5805 + }, + { + "epoch": 3.748224661071659, + "grad_norm": 1.5350007975315043, + "learning_rate": 0.0001397356139303993, + "loss": 1.769, + "step": 5806 + 
}, + { + "epoch": 3.748870238863783, + "grad_norm": 1.333776563886259, + "learning_rate": 0.00013967115994876608, + "loss": 1.5739, + "step": 5807 + }, + { + "epoch": 3.749515816655907, + "grad_norm": 1.4385230244462561, + "learning_rate": 0.00013960670788323995, + "loss": 1.5994, + "step": 5808 + }, + { + "epoch": 3.750161394448031, + "grad_norm": 1.3639025564900384, + "learning_rate": 0.00013954225774577734, + "loss": 1.5886, + "step": 5809 + }, + { + "epoch": 3.750806972240155, + "grad_norm": 1.4889540160961932, + "learning_rate": 0.00013947780954833456, + "loss": 1.8211, + "step": 5810 + }, + { + "epoch": 3.751452550032279, + "grad_norm": 1.4097682717466458, + "learning_rate": 0.00013941336330286727, + "loss": 1.6426, + "step": 5811 + }, + { + "epoch": 3.7520981278244028, + "grad_norm": 1.344829812457423, + "learning_rate": 0.00013934891902133112, + "loss": 1.5298, + "step": 5812 + }, + { + "epoch": 3.7527437056165267, + "grad_norm": 1.3509733238215262, + "learning_rate": 0.000139284476715681, + "loss": 1.5595, + "step": 5813 + }, + { + "epoch": 3.7533892834086506, + "grad_norm": 1.2660849320276641, + "learning_rate": 0.00013922003639787174, + "loss": 1.5423, + "step": 5814 + }, + { + "epoch": 3.7540348612007746, + "grad_norm": 1.3473067189248775, + "learning_rate": 0.00013915559807985766, + "loss": 1.6903, + "step": 5815 + }, + { + "epoch": 3.7546804389928985, + "grad_norm": 1.3704598619566082, + "learning_rate": 0.00013909116177359266, + "loss": 1.5864, + "step": 5816 + }, + { + "epoch": 3.7553260167850224, + "grad_norm": 1.3837813069155935, + "learning_rate": 0.00013902672749103046, + "loss": 1.6501, + "step": 5817 + }, + { + "epoch": 3.7559715945771464, + "grad_norm": 1.3392152516684828, + "learning_rate": 0.00013896229524412428, + "loss": 1.6358, + "step": 5818 + }, + { + "epoch": 3.7566171723692703, + "grad_norm": 1.4730550289383553, + "learning_rate": 0.00013889786504482683, + "loss": 1.7506, + "step": 5819 + }, + { + "epoch": 3.757262750161394, + "grad_norm": 1.3265340690190628, + "learning_rate": 0.0001388334369050907, + "loss": 1.4943, + "step": 5820 + }, + { + "epoch": 3.757908327953518, + "grad_norm": 1.537173711539551, + "learning_rate": 0.00013876901083686806, + "loss": 1.7073, + "step": 5821 + }, + { + "epoch": 3.758553905745642, + "grad_norm": 1.4799476643549703, + "learning_rate": 0.00013870458685211044, + "loss": 1.7639, + "step": 5822 + }, + { + "epoch": 3.7591994835377665, + "grad_norm": 1.482586028607423, + "learning_rate": 0.0001386401649627692, + "loss": 1.7439, + "step": 5823 + }, + { + "epoch": 3.7598450613298904, + "grad_norm": 1.5642786992958213, + "learning_rate": 0.0001385757451807954, + "loss": 2.0631, + "step": 5824 + }, + { + "epoch": 3.7604906391220143, + "grad_norm": 1.3934864165132017, + "learning_rate": 0.00013851132751813942, + "loss": 1.4628, + "step": 5825 + }, + { + "epoch": 3.7611362169141382, + "grad_norm": 1.4206904531917757, + "learning_rate": 0.00013844691198675144, + "loss": 1.7877, + "step": 5826 + }, + { + "epoch": 3.761781794706262, + "grad_norm": 1.341385106223726, + "learning_rate": 0.0001383824985985813, + "loss": 1.6091, + "step": 5827 + }, + { + "epoch": 3.762427372498386, + "grad_norm": 1.3014638322177083, + "learning_rate": 0.00013831808736557827, + "loss": 1.5597, + "step": 5828 + }, + { + "epoch": 3.76307295029051, + "grad_norm": 1.4845109767243767, + "learning_rate": 0.00013825367829969126, + "loss": 1.6251, + "step": 5829 + }, + { + "epoch": 3.763718528082634, + "grad_norm": 1.3540361315119478, + "learning_rate": 
0.00013818927141286897, + "loss": 1.5141, + "step": 5830 + }, + { + "epoch": 3.764364105874758, + "grad_norm": 1.278008219437413, + "learning_rate": 0.0001381248667170594, + "loss": 1.4702, + "step": 5831 + }, + { + "epoch": 3.765009683666882, + "grad_norm": 1.2670121394526863, + "learning_rate": 0.00013806046422421033, + "loss": 1.4274, + "step": 5832 + }, + { + "epoch": 3.7656552614590058, + "grad_norm": 1.4068599940349218, + "learning_rate": 0.0001379960639462692, + "loss": 1.8216, + "step": 5833 + }, + { + "epoch": 3.7663008392511297, + "grad_norm": 1.5161532487997147, + "learning_rate": 0.00013793166589518274, + "loss": 1.91, + "step": 5834 + }, + { + "epoch": 3.7669464170432536, + "grad_norm": 1.3550705364127846, + "learning_rate": 0.00013786727008289757, + "loss": 1.5744, + "step": 5835 + }, + { + "epoch": 3.7675919948353775, + "grad_norm": 1.369389966179837, + "learning_rate": 0.00013780287652135977, + "loss": 1.6571, + "step": 5836 + }, + { + "epoch": 3.7682375726275015, + "grad_norm": 1.4383624554809704, + "learning_rate": 0.00013773848522251507, + "loss": 1.6355, + "step": 5837 + }, + { + "epoch": 3.7688831504196254, + "grad_norm": 1.607034109934943, + "learning_rate": 0.00013767409619830858, + "loss": 1.6632, + "step": 5838 + }, + { + "epoch": 3.76952872821175, + "grad_norm": 1.4221145270483158, + "learning_rate": 0.00013760970946068529, + "loss": 1.713, + "step": 5839 + }, + { + "epoch": 3.7701743060038737, + "grad_norm": 1.4929004725081176, + "learning_rate": 0.00013754532502158954, + "loss": 1.8272, + "step": 5840 + }, + { + "epoch": 3.7708198837959976, + "grad_norm": 1.4476137827852837, + "learning_rate": 0.00013748094289296525, + "loss": 1.6193, + "step": 5841 + }, + { + "epoch": 3.7714654615881216, + "grad_norm": 1.4246049293189895, + "learning_rate": 0.00013741656308675603, + "loss": 1.6214, + "step": 5842 + }, + { + "epoch": 3.7721110393802455, + "grad_norm": 1.4326784466364428, + "learning_rate": 0.00013735218561490507, + "loss": 1.7188, + "step": 5843 + }, + { + "epoch": 3.7727566171723694, + "grad_norm": 1.422779803289343, + "learning_rate": 0.00013728781048935493, + "loss": 1.7168, + "step": 5844 + }, + { + "epoch": 3.7734021949644934, + "grad_norm": 1.440912118445076, + "learning_rate": 0.00013722343772204794, + "loss": 1.6178, + "step": 5845 + }, + { + "epoch": 3.7740477727566173, + "grad_norm": 1.4644859477741399, + "learning_rate": 0.00013715906732492594, + "loss": 1.8108, + "step": 5846 + }, + { + "epoch": 3.774693350548741, + "grad_norm": 1.5350512592913172, + "learning_rate": 0.00013709469930993026, + "loss": 1.812, + "step": 5847 + }, + { + "epoch": 3.775338928340865, + "grad_norm": 1.4344116607078021, + "learning_rate": 0.00013703033368900179, + "loss": 1.4901, + "step": 5848 + }, + { + "epoch": 3.775984506132989, + "grad_norm": 1.530257073448608, + "learning_rate": 0.00013696597047408118, + "loss": 1.5768, + "step": 5849 + }, + { + "epoch": 3.776630083925113, + "grad_norm": 1.5123290444178268, + "learning_rate": 0.00013690160967710831, + "loss": 1.701, + "step": 5850 + }, + { + "epoch": 3.777275661717237, + "grad_norm": 1.4893894539529187, + "learning_rate": 0.00013683725131002282, + "loss": 1.864, + "step": 5851 + }, + { + "epoch": 3.777921239509361, + "grad_norm": 1.3473850306246182, + "learning_rate": 0.00013677289538476393, + "loss": 1.6188, + "step": 5852 + }, + { + "epoch": 3.778566817301485, + "grad_norm": 1.3599555717797915, + "learning_rate": 0.00013670854191327023, + "loss": 1.7103, + "step": 5853 + }, + { + "epoch": 3.7792123950936087, + 
"grad_norm": 1.541660832384393, + "learning_rate": 0.00013664419090747998, + "loss": 1.7279, + "step": 5854 + }, + { + "epoch": 3.7798579728857327, + "grad_norm": 1.6054532648818445, + "learning_rate": 0.00013657984237933104, + "loss": 1.9521, + "step": 5855 + }, + { + "epoch": 3.7805035506778566, + "grad_norm": 1.4705742845285037, + "learning_rate": 0.00013651549634076062, + "loss": 1.6487, + "step": 5856 + }, + { + "epoch": 3.7811491284699805, + "grad_norm": 1.3586623595755634, + "learning_rate": 0.00013645115280370567, + "loss": 1.8007, + "step": 5857 + }, + { + "epoch": 3.7817947062621045, + "grad_norm": 1.2267639626307225, + "learning_rate": 0.0001363868117801026, + "loss": 1.3867, + "step": 5858 + }, + { + "epoch": 3.7824402840542284, + "grad_norm": 1.329226173742946, + "learning_rate": 0.00013632247328188715, + "loss": 1.4637, + "step": 5859 + }, + { + "epoch": 3.7830858618463523, + "grad_norm": 1.5079944342922529, + "learning_rate": 0.000136258137320995, + "loss": 1.627, + "step": 5860 + }, + { + "epoch": 3.7837314396384762, + "grad_norm": 1.419331453805438, + "learning_rate": 0.00013619380390936098, + "loss": 1.7472, + "step": 5861 + }, + { + "epoch": 3.7843770174306, + "grad_norm": 1.2750564716525266, + "learning_rate": 0.00013612947305891978, + "loss": 1.3791, + "step": 5862 + }, + { + "epoch": 3.785022595222724, + "grad_norm": 1.4230426730060617, + "learning_rate": 0.00013606514478160526, + "loss": 1.4968, + "step": 5863 + }, + { + "epoch": 3.785668173014848, + "grad_norm": 1.4863928311793506, + "learning_rate": 0.00013600081908935109, + "loss": 1.675, + "step": 5864 + }, + { + "epoch": 3.786313750806972, + "grad_norm": 1.4494943301980205, + "learning_rate": 0.00013593649599409036, + "loss": 1.8913, + "step": 5865 + }, + { + "epoch": 3.7869593285990963, + "grad_norm": 1.5290577320658063, + "learning_rate": 0.00013587217550775556, + "loss": 1.7881, + "step": 5866 + }, + { + "epoch": 3.7876049063912203, + "grad_norm": 1.601944770356184, + "learning_rate": 0.00013580785764227891, + "loss": 1.8285, + "step": 5867 + }, + { + "epoch": 3.788250484183344, + "grad_norm": 1.439830709196358, + "learning_rate": 0.00013574354240959206, + "loss": 1.6876, + "step": 5868 + }, + { + "epoch": 3.788896061975468, + "grad_norm": 1.458681158370713, + "learning_rate": 0.00013567922982162604, + "loss": 1.7827, + "step": 5869 + }, + { + "epoch": 3.789541639767592, + "grad_norm": 1.2617423884564385, + "learning_rate": 0.0001356149198903116, + "loss": 1.4084, + "step": 5870 + }, + { + "epoch": 3.790187217559716, + "grad_norm": 1.490902320449385, + "learning_rate": 0.00013555061262757892, + "loss": 1.7006, + "step": 5871 + }, + { + "epoch": 3.79083279535184, + "grad_norm": 1.4013439580656537, + "learning_rate": 0.00013548630804535757, + "loss": 1.5297, + "step": 5872 + }, + { + "epoch": 3.791478373143964, + "grad_norm": 1.3612093861620511, + "learning_rate": 0.00013542200615557673, + "loss": 1.5502, + "step": 5873 + }, + { + "epoch": 3.792123950936088, + "grad_norm": 1.643418523667358, + "learning_rate": 0.0001353577069701652, + "loss": 1.8086, + "step": 5874 + }, + { + "epoch": 3.7927695287282117, + "grad_norm": 1.4097094461075534, + "learning_rate": 0.000135293410501051, + "loss": 1.6517, + "step": 5875 + }, + { + "epoch": 3.7934151065203356, + "grad_norm": 1.6059326725568184, + "learning_rate": 0.0001352291167601618, + "loss": 1.7694, + "step": 5876 + }, + { + "epoch": 3.7940606843124596, + "grad_norm": 1.320857405488368, + "learning_rate": 0.00013516482575942488, + "loss": 1.4388, + "step": 5877 + }, 
+ { + "epoch": 3.7947062621045835, + "grad_norm": 1.401486355544225, + "learning_rate": 0.00013510053751076675, + "loss": 1.5528, + "step": 5878 + }, + { + "epoch": 3.7953518398967074, + "grad_norm": 1.6957415816955412, + "learning_rate": 0.0001350362520261136, + "loss": 1.8676, + "step": 5879 + }, + { + "epoch": 3.7959974176888314, + "grad_norm": 1.4660022688985868, + "learning_rate": 0.00013497196931739112, + "loss": 1.5723, + "step": 5880 + }, + { + "epoch": 3.7966429954809553, + "grad_norm": 1.414065908763991, + "learning_rate": 0.00013490768939652427, + "loss": 1.544, + "step": 5881 + }, + { + "epoch": 3.7972885732730797, + "grad_norm": 1.3161042005736514, + "learning_rate": 0.0001348434122754378, + "loss": 1.565, + "step": 5882 + }, + { + "epoch": 3.7979341510652036, + "grad_norm": 1.2787614250287163, + "learning_rate": 0.0001347791379660557, + "loss": 1.4664, + "step": 5883 + }, + { + "epoch": 3.7985797288573275, + "grad_norm": 1.563464951390181, + "learning_rate": 0.00013471486648030152, + "loss": 1.6442, + "step": 5884 + }, + { + "epoch": 3.7992253066494515, + "grad_norm": 1.6043366777043542, + "learning_rate": 0.0001346505978300983, + "loss": 1.7895, + "step": 5885 + }, + { + "epoch": 3.7998708844415754, + "grad_norm": 1.3416121406850017, + "learning_rate": 0.00013458633202736853, + "loss": 1.7236, + "step": 5886 + }, + { + "epoch": 3.8005164622336993, + "grad_norm": 1.459568061225855, + "learning_rate": 0.0001345220690840343, + "loss": 1.7162, + "step": 5887 + }, + { + "epoch": 3.8011620400258233, + "grad_norm": 1.5700065900816478, + "learning_rate": 0.00013445780901201682, + "loss": 1.7619, + "step": 5888 + }, + { + "epoch": 3.801807617817947, + "grad_norm": 1.299779154889943, + "learning_rate": 0.0001343935518232372, + "loss": 1.4794, + "step": 5889 + }, + { + "epoch": 3.802453195610071, + "grad_norm": 1.4085676987822588, + "learning_rate": 0.0001343292975296158, + "loss": 1.6363, + "step": 5890 + }, + { + "epoch": 3.803098773402195, + "grad_norm": 1.3789823439440752, + "learning_rate": 0.00013426504614307233, + "loss": 1.5719, + "step": 5891 + }, + { + "epoch": 3.803744351194319, + "grad_norm": 1.4104533494262352, + "learning_rate": 0.0001342007976755262, + "loss": 1.5136, + "step": 5892 + }, + { + "epoch": 3.804389928986443, + "grad_norm": 1.3303319523932282, + "learning_rate": 0.00013413655213889616, + "loss": 1.4382, + "step": 5893 + }, + { + "epoch": 3.805035506778567, + "grad_norm": 1.4800871810629053, + "learning_rate": 0.00013407230954510035, + "loss": 1.6658, + "step": 5894 + }, + { + "epoch": 3.8056810845706908, + "grad_norm": 1.5155756281705417, + "learning_rate": 0.00013400806990605647, + "loss": 1.6526, + "step": 5895 + }, + { + "epoch": 3.8063266623628147, + "grad_norm": 1.5558957174853891, + "learning_rate": 0.00013394383323368173, + "loss": 1.7718, + "step": 5896 + }, + { + "epoch": 3.8069722401549386, + "grad_norm": 1.3061245932298031, + "learning_rate": 0.00013387959953989258, + "loss": 1.5131, + "step": 5897 + }, + { + "epoch": 3.8076178179470626, + "grad_norm": 1.4391481123748657, + "learning_rate": 0.00013381536883660508, + "loss": 1.6772, + "step": 5898 + }, + { + "epoch": 3.8082633957391865, + "grad_norm": 1.5078765088403474, + "learning_rate": 0.00013375114113573473, + "loss": 1.7993, + "step": 5899 + }, + { + "epoch": 3.8089089735313104, + "grad_norm": 1.449576167646823, + "learning_rate": 0.0001336869164491964, + "loss": 1.6311, + "step": 5900 + }, + { + "epoch": 3.8095545513234343, + "grad_norm": 1.378939769915335, + "learning_rate": 
0.00013362269478890438, + "loss": 1.7338, + "step": 5901 + }, + { + "epoch": 3.8102001291155583, + "grad_norm": 1.6460765017382848, + "learning_rate": 0.0001335584761667726, + "loss": 1.732, + "step": 5902 + }, + { + "epoch": 3.810845706907682, + "grad_norm": 1.5545557106403605, + "learning_rate": 0.0001334942605947141, + "loss": 1.7012, + "step": 5903 + }, + { + "epoch": 3.811491284699806, + "grad_norm": 1.4264849900033552, + "learning_rate": 0.0001334300480846417, + "loss": 1.6905, + "step": 5904 + }, + { + "epoch": 3.81213686249193, + "grad_norm": 1.3637087159705625, + "learning_rate": 0.00013336583864846744, + "loss": 1.5383, + "step": 5905 + }, + { + "epoch": 3.812782440284054, + "grad_norm": 1.4776705080468724, + "learning_rate": 0.00013330163229810274, + "loss": 1.6063, + "step": 5906 + }, + { + "epoch": 3.813428018076178, + "grad_norm": 1.4499576504207978, + "learning_rate": 0.00013323742904545864, + "loss": 1.8302, + "step": 5907 + }, + { + "epoch": 3.814073595868302, + "grad_norm": 1.4286327897341866, + "learning_rate": 0.00013317322890244552, + "loss": 1.6692, + "step": 5908 + }, + { + "epoch": 3.8147191736604262, + "grad_norm": 1.6535274790440642, + "learning_rate": 0.00013310903188097308, + "loss": 1.845, + "step": 5909 + }, + { + "epoch": 3.81536475145255, + "grad_norm": 1.4363128821544708, + "learning_rate": 0.00013304483799295066, + "loss": 1.6572, + "step": 5910 + }, + { + "epoch": 3.816010329244674, + "grad_norm": 1.4322970501644363, + "learning_rate": 0.00013298064725028678, + "loss": 1.5949, + "step": 5911 + }, + { + "epoch": 3.816655907036798, + "grad_norm": 1.3953887193816676, + "learning_rate": 0.00013291645966488955, + "loss": 1.6387, + "step": 5912 + }, + { + "epoch": 3.817301484828922, + "grad_norm": 1.5521086140180271, + "learning_rate": 0.00013285227524866642, + "loss": 1.7988, + "step": 5913 + }, + { + "epoch": 3.817947062621046, + "grad_norm": 1.3674326315992091, + "learning_rate": 0.00013278809401352426, + "loss": 1.5224, + "step": 5914 + }, + { + "epoch": 3.81859264041317, + "grad_norm": 1.467383115414658, + "learning_rate": 0.00013272391597136943, + "loss": 1.6032, + "step": 5915 + }, + { + "epoch": 3.8192382182052937, + "grad_norm": 1.50295824711809, + "learning_rate": 0.00013265974113410746, + "loss": 1.6537, + "step": 5916 + }, + { + "epoch": 3.8198837959974177, + "grad_norm": 1.3311906657043593, + "learning_rate": 0.00013259556951364357, + "loss": 1.5274, + "step": 5917 + }, + { + "epoch": 3.8205293737895416, + "grad_norm": 1.427614807507017, + "learning_rate": 0.00013253140112188231, + "loss": 1.7238, + "step": 5918 + }, + { + "epoch": 3.8211749515816655, + "grad_norm": 1.4114265976911424, + "learning_rate": 0.00013246723597072743, + "loss": 1.6764, + "step": 5919 + }, + { + "epoch": 3.8218205293737895, + "grad_norm": 1.4938137256742583, + "learning_rate": 0.00013240307407208228, + "loss": 1.8367, + "step": 5920 + }, + { + "epoch": 3.8224661071659134, + "grad_norm": 1.4275872815176711, + "learning_rate": 0.00013233891543784968, + "loss": 1.6343, + "step": 5921 + }, + { + "epoch": 3.8231116849580373, + "grad_norm": 1.5535527939299592, + "learning_rate": 0.0001322747600799316, + "loss": 1.6664, + "step": 5922 + }, + { + "epoch": 3.8237572627501613, + "grad_norm": 1.430340110060442, + "learning_rate": 0.00013221060801022952, + "loss": 1.5466, + "step": 5923 + }, + { + "epoch": 3.824402840542285, + "grad_norm": 1.6310021010397402, + "learning_rate": 0.0001321464592406444, + "loss": 1.7432, + "step": 5924 + }, + { + "epoch": 3.8250484183344096, + 
"grad_norm": 1.5969022473010166, + "learning_rate": 0.00013208231378307643, + "loss": 1.4732, + "step": 5925 + }, + { + "epoch": 3.8256939961265335, + "grad_norm": 1.5700281221412744, + "learning_rate": 0.00013201817164942525, + "loss": 1.679, + "step": 5926 + }, + { + "epoch": 3.8263395739186574, + "grad_norm": 1.4523960440202428, + "learning_rate": 0.00013195403285158998, + "loss": 1.5369, + "step": 5927 + }, + { + "epoch": 3.8269851517107814, + "grad_norm": 1.5258410080562286, + "learning_rate": 0.00013188989740146894, + "loss": 1.6503, + "step": 5928 + }, + { + "epoch": 3.8276307295029053, + "grad_norm": 1.4762985071106902, + "learning_rate": 0.00013182576531095997, + "loss": 1.7225, + "step": 5929 + }, + { + "epoch": 3.828276307295029, + "grad_norm": 1.4907903381274257, + "learning_rate": 0.00013176163659196026, + "loss": 1.6036, + "step": 5930 + }, + { + "epoch": 3.828921885087153, + "grad_norm": 1.30959081850688, + "learning_rate": 0.00013169751125636627, + "loss": 1.3545, + "step": 5931 + }, + { + "epoch": 3.829567462879277, + "grad_norm": 1.6869569941181402, + "learning_rate": 0.00013163338931607399, + "loss": 1.6856, + "step": 5932 + }, + { + "epoch": 3.830213040671401, + "grad_norm": 1.4640379384649025, + "learning_rate": 0.00013156927078297872, + "loss": 1.7016, + "step": 5933 + }, + { + "epoch": 3.830858618463525, + "grad_norm": 1.4865540279118743, + "learning_rate": 0.000131505155668975, + "loss": 1.6724, + "step": 5934 + }, + { + "epoch": 3.831504196255649, + "grad_norm": 1.4051095771744662, + "learning_rate": 0.00013144104398595696, + "loss": 1.6327, + "step": 5935 + }, + { + "epoch": 3.832149774047773, + "grad_norm": 1.3543526506176264, + "learning_rate": 0.0001313769357458179, + "loss": 1.5305, + "step": 5936 + }, + { + "epoch": 3.8327953518398967, + "grad_norm": 1.4120948930217896, + "learning_rate": 0.00013131283096045072, + "loss": 1.7549, + "step": 5937 + }, + { + "epoch": 3.8334409296320207, + "grad_norm": 1.4241106301488504, + "learning_rate": 0.00013124872964174732, + "loss": 1.5654, + "step": 5938 + }, + { + "epoch": 3.8340865074241446, + "grad_norm": 1.517629094120721, + "learning_rate": 0.00013118463180159927, + "loss": 1.7605, + "step": 5939 + }, + { + "epoch": 3.8347320852162685, + "grad_norm": 1.4244011236470446, + "learning_rate": 0.00013112053745189744, + "loss": 1.6442, + "step": 5940 + }, + { + "epoch": 3.8353776630083924, + "grad_norm": 1.5743190344445714, + "learning_rate": 0.00013105644660453184, + "loss": 1.6181, + "step": 5941 + }, + { + "epoch": 3.8360232408005164, + "grad_norm": 1.4216532016851309, + "learning_rate": 0.00013099235927139212, + "loss": 1.6245, + "step": 5942 + }, + { + "epoch": 3.8366688185926403, + "grad_norm": 1.3270585515586615, + "learning_rate": 0.00013092827546436713, + "loss": 1.5283, + "step": 5943 + }, + { + "epoch": 3.8373143963847642, + "grad_norm": 1.2288229609066967, + "learning_rate": 0.00013086419519534498, + "loss": 1.3958, + "step": 5944 + }, + { + "epoch": 3.837959974176888, + "grad_norm": 1.4307597078847534, + "learning_rate": 0.0001308001184762133, + "loss": 1.6994, + "step": 5945 + }, + { + "epoch": 3.838605551969012, + "grad_norm": 1.400675641447555, + "learning_rate": 0.00013073604531885905, + "loss": 1.5675, + "step": 5946 + }, + { + "epoch": 3.839251129761136, + "grad_norm": 1.5472439124664306, + "learning_rate": 0.00013067197573516837, + "loss": 1.8787, + "step": 5947 + }, + { + "epoch": 3.83989670755326, + "grad_norm": 1.6420077027128783, + "learning_rate": 0.00013060790973702682, + "loss": 1.9641, + 
"step": 5948 + }, + { + "epoch": 3.840542285345384, + "grad_norm": 1.606887869724036, + "learning_rate": 0.00013054384733631944, + "loss": 1.7469, + "step": 5949 + }, + { + "epoch": 3.841187863137508, + "grad_norm": 1.6861693838758298, + "learning_rate": 0.00013047978854493033, + "loss": 1.8455, + "step": 5950 + }, + { + "epoch": 3.8418334409296317, + "grad_norm": 1.5045516693215684, + "learning_rate": 0.00013041573337474309, + "loss": 1.702, + "step": 5951 + }, + { + "epoch": 3.842479018721756, + "grad_norm": 1.4700717319260588, + "learning_rate": 0.00013035168183764067, + "loss": 1.7593, + "step": 5952 + }, + { + "epoch": 3.84312459651388, + "grad_norm": 1.6446704977467594, + "learning_rate": 0.00013028763394550526, + "loss": 1.6437, + "step": 5953 + }, + { + "epoch": 3.843770174306004, + "grad_norm": 1.5248471224048195, + "learning_rate": 0.0001302235897102184, + "loss": 1.9572, + "step": 5954 + }, + { + "epoch": 3.844415752098128, + "grad_norm": 1.4416240934843594, + "learning_rate": 0.00013015954914366102, + "loss": 1.7615, + "step": 5955 + }, + { + "epoch": 3.845061329890252, + "grad_norm": 1.5382617703979071, + "learning_rate": 0.0001300955122577132, + "loss": 1.7808, + "step": 5956 + }, + { + "epoch": 3.8457069076823758, + "grad_norm": 1.470605325122018, + "learning_rate": 0.00013003147906425456, + "loss": 1.61, + "step": 5957 + }, + { + "epoch": 3.8463524854744997, + "grad_norm": 1.5388547055326995, + "learning_rate": 0.00012996744957516392, + "loss": 1.685, + "step": 5958 + }, + { + "epoch": 3.8469980632666236, + "grad_norm": 1.5311303130353944, + "learning_rate": 0.00012990342380231928, + "loss": 1.7749, + "step": 5959 + }, + { + "epoch": 3.8476436410587476, + "grad_norm": 1.6891069160150038, + "learning_rate": 0.00012983940175759822, + "loss": 1.8231, + "step": 5960 + }, + { + "epoch": 3.8482892188508715, + "grad_norm": 1.312345467535738, + "learning_rate": 0.0001297753834528774, + "loss": 1.5466, + "step": 5961 + }, + { + "epoch": 3.8489347966429954, + "grad_norm": 1.356814647209922, + "learning_rate": 0.00012971136890003302, + "loss": 1.6487, + "step": 5962 + }, + { + "epoch": 3.8495803744351194, + "grad_norm": 1.2349831303365109, + "learning_rate": 0.00012964735811094028, + "loss": 1.3674, + "step": 5963 + }, + { + "epoch": 3.8502259522272433, + "grad_norm": 1.3146655906251123, + "learning_rate": 0.00012958335109747394, + "loss": 1.5093, + "step": 5964 + }, + { + "epoch": 3.850871530019367, + "grad_norm": 1.3738940126864376, + "learning_rate": 0.00012951934787150797, + "loss": 1.4777, + "step": 5965 + }, + { + "epoch": 3.851517107811491, + "grad_norm": 1.4427509229407613, + "learning_rate": 0.00012945534844491558, + "loss": 1.7821, + "step": 5966 + }, + { + "epoch": 3.852162685603615, + "grad_norm": 1.513583515366663, + "learning_rate": 0.00012939135282956935, + "loss": 1.6522, + "step": 5967 + }, + { + "epoch": 3.8528082633957395, + "grad_norm": 1.551496604466164, + "learning_rate": 0.0001293273610373412, + "loss": 1.7398, + "step": 5968 + }, + { + "epoch": 3.8534538411878634, + "grad_norm": 1.470184834028088, + "learning_rate": 0.00012926337308010214, + "loss": 1.6048, + "step": 5969 + }, + { + "epoch": 3.8540994189799873, + "grad_norm": 1.2062249060662862, + "learning_rate": 0.00012919938896972268, + "loss": 1.3437, + "step": 5970 + }, + { + "epoch": 3.8547449967721112, + "grad_norm": 1.4949604878895373, + "learning_rate": 0.00012913540871807257, + "loss": 1.6464, + "step": 5971 + }, + { + "epoch": 3.855390574564235, + "grad_norm": 1.4017575272395992, + "learning_rate": 
0.00012907143233702073, + "loss": 1.6848, + "step": 5972 + }, + { + "epoch": 3.856036152356359, + "grad_norm": 1.4403652724148144, + "learning_rate": 0.00012900745983843547, + "loss": 1.6518, + "step": 5973 + }, + { + "epoch": 3.856681730148483, + "grad_norm": 1.7074766681508002, + "learning_rate": 0.00012894349123418442, + "loss": 1.7077, + "step": 5974 + }, + { + "epoch": 3.857327307940607, + "grad_norm": 1.4475694071561356, + "learning_rate": 0.00012887952653613434, + "loss": 1.5647, + "step": 5975 + }, + { + "epoch": 3.857972885732731, + "grad_norm": 1.462497937561849, + "learning_rate": 0.00012881556575615132, + "loss": 1.5711, + "step": 5976 + }, + { + "epoch": 3.858618463524855, + "grad_norm": 1.328047087269485, + "learning_rate": 0.0001287516089061009, + "loss": 1.48, + "step": 5977 + }, + { + "epoch": 3.8592640413169788, + "grad_norm": 1.3945073753242572, + "learning_rate": 0.00012868765599784754, + "loss": 1.4429, + "step": 5978 + }, + { + "epoch": 3.8599096191091027, + "grad_norm": 1.8357555277085291, + "learning_rate": 0.0001286237070432553, + "loss": 1.6494, + "step": 5979 + }, + { + "epoch": 3.8605551969012266, + "grad_norm": 1.4154936785058132, + "learning_rate": 0.00012855976205418737, + "loss": 1.4225, + "step": 5980 + }, + { + "epoch": 3.8612007746933505, + "grad_norm": 1.2324033739473508, + "learning_rate": 0.0001284958210425061, + "loss": 1.2431, + "step": 5981 + }, + { + "epoch": 3.8618463524854745, + "grad_norm": 1.5184051974405441, + "learning_rate": 0.00012843188402007335, + "loss": 1.5884, + "step": 5982 + }, + { + "epoch": 3.8624919302775984, + "grad_norm": 1.446651608271782, + "learning_rate": 0.0001283679509987501, + "loss": 1.5049, + "step": 5983 + }, + { + "epoch": 3.8631375080697223, + "grad_norm": 1.6312339755856164, + "learning_rate": 0.00012830402199039644, + "loss": 1.6701, + "step": 5984 + }, + { + "epoch": 3.8637830858618463, + "grad_norm": 1.5964869860349555, + "learning_rate": 0.00012824009700687197, + "loss": 1.651, + "step": 5985 + }, + { + "epoch": 3.86442866365397, + "grad_norm": 1.6176150337664734, + "learning_rate": 0.00012817617606003542, + "loss": 1.7529, + "step": 5986 + }, + { + "epoch": 3.865074241446094, + "grad_norm": 1.4801851949265579, + "learning_rate": 0.00012811225916174485, + "loss": 1.7455, + "step": 5987 + }, + { + "epoch": 3.865719819238218, + "grad_norm": 1.6454363284734, + "learning_rate": 0.00012804834632385741, + "loss": 1.948, + "step": 5988 + }, + { + "epoch": 3.866365397030342, + "grad_norm": 1.4587822913280688, + "learning_rate": 0.00012798443755822965, + "loss": 1.8375, + "step": 5989 + }, + { + "epoch": 3.867010974822466, + "grad_norm": 1.5723526223551192, + "learning_rate": 0.00012792053287671732, + "loss": 1.8189, + "step": 5990 + }, + { + "epoch": 3.86765655261459, + "grad_norm": 1.4911936634816094, + "learning_rate": 0.00012785663229117534, + "loss": 1.8174, + "step": 5991 + }, + { + "epoch": 3.868302130406714, + "grad_norm": 1.397018424606908, + "learning_rate": 0.000127792735813458, + "loss": 1.6332, + "step": 5992 + }, + { + "epoch": 3.8689477081988377, + "grad_norm": 1.3965039007953097, + "learning_rate": 0.00012772884345541876, + "loss": 1.6127, + "step": 5993 + }, + { + "epoch": 3.8695932859909616, + "grad_norm": 1.3265800204134348, + "learning_rate": 0.0001276649552289102, + "loss": 1.6156, + "step": 5994 + }, + { + "epoch": 3.870238863783086, + "grad_norm": 1.2202049239445212, + "learning_rate": 0.0001276010711457844, + "loss": 1.5213, + "step": 5995 + }, + { + "epoch": 3.87088444157521, + "grad_norm": 
1.6478964775056668, + "learning_rate": 0.00012753719121789245, + "loss": 1.8241, + "step": 5996 + }, + { + "epoch": 3.871530019367334, + "grad_norm": 1.383695008472204, + "learning_rate": 0.00012747331545708473, + "loss": 1.6918, + "step": 5997 + }, + { + "epoch": 3.872175597159458, + "grad_norm": 1.6805243514322037, + "learning_rate": 0.00012740944387521088, + "loss": 1.7066, + "step": 5998 + }, + { + "epoch": 3.8728211749515817, + "grad_norm": 1.4435089557227383, + "learning_rate": 0.00012734557648411978, + "loss": 1.7292, + "step": 5999 + }, + { + "epoch": 3.8734667527437057, + "grad_norm": 1.461091210440846, + "learning_rate": 0.0001272817132956594, + "loss": 1.7755, + "step": 6000 + }, + { + "epoch": 3.8741123305358296, + "grad_norm": 1.4279204680164113, + "learning_rate": 0.0001272178543216771, + "loss": 1.6455, + "step": 6001 + }, + { + "epoch": 3.8747579083279535, + "grad_norm": 1.4602967705705585, + "learning_rate": 0.00012715399957401936, + "loss": 1.6932, + "step": 6002 + }, + { + "epoch": 3.8754034861200775, + "grad_norm": 1.3960389444450119, + "learning_rate": 0.00012709014906453185, + "loss": 1.6312, + "step": 6003 + }, + { + "epoch": 3.8760490639122014, + "grad_norm": 1.5207172642009772, + "learning_rate": 0.0001270263028050596, + "loss": 1.7761, + "step": 6004 + }, + { + "epoch": 3.8766946417043253, + "grad_norm": 1.460863539177173, + "learning_rate": 0.00012696246080744672, + "loss": 1.6797, + "step": 6005 + }, + { + "epoch": 3.8773402194964492, + "grad_norm": 1.4997126523353217, + "learning_rate": 0.00012689862308353644, + "loss": 1.674, + "step": 6006 + }, + { + "epoch": 3.877985797288573, + "grad_norm": 1.7192983751644662, + "learning_rate": 0.00012683478964517153, + "loss": 1.6186, + "step": 6007 + }, + { + "epoch": 3.878631375080697, + "grad_norm": 1.461270666828723, + "learning_rate": 0.00012677096050419366, + "loss": 1.5304, + "step": 6008 + }, + { + "epoch": 3.879276952872821, + "grad_norm": 1.3292014967372043, + "learning_rate": 0.0001267071356724437, + "loss": 1.2754, + "step": 6009 + }, + { + "epoch": 3.879922530664945, + "grad_norm": 1.660492368641883, + "learning_rate": 0.0001266433151617619, + "loss": 1.6779, + "step": 6010 + }, + { + "epoch": 3.8805681084570693, + "grad_norm": 1.339385342366256, + "learning_rate": 0.00012657949898398774, + "loss": 1.6213, + "step": 6011 + }, + { + "epoch": 3.8812136862491933, + "grad_norm": 1.4498153610048987, + "learning_rate": 0.00012651568715095965, + "loss": 1.5704, + "step": 6012 + }, + { + "epoch": 3.881859264041317, + "grad_norm": 1.544421598643708, + "learning_rate": 0.00012645187967451536, + "loss": 1.9693, + "step": 6013 + }, + { + "epoch": 3.882504841833441, + "grad_norm": 1.5346133182136479, + "learning_rate": 0.00012638807656649195, + "loss": 1.9178, + "step": 6014 + }, + { + "epoch": 3.883150419625565, + "grad_norm": 1.6124411777655385, + "learning_rate": 0.00012632427783872547, + "loss": 1.5324, + "step": 6015 + }, + { + "epoch": 3.883795997417689, + "grad_norm": 1.5125152391378975, + "learning_rate": 0.0001262604835030512, + "loss": 1.7355, + "step": 6016 + }, + { + "epoch": 3.884441575209813, + "grad_norm": 1.347413140508481, + "learning_rate": 0.0001261966935713038, + "loss": 1.4241, + "step": 6017 + }, + { + "epoch": 3.885087153001937, + "grad_norm": 1.5336855995042042, + "learning_rate": 0.00012613290805531688, + "loss": 1.8517, + "step": 6018 + }, + { + "epoch": 3.885732730794061, + "grad_norm": 1.536988897077945, + "learning_rate": 0.00012606912696692322, + "loss": 1.637, + "step": 6019 + }, + { + 
"epoch": 3.8863783085861847, + "grad_norm": 1.4147044994094142, + "learning_rate": 0.000126005350317955, + "loss": 1.7063, + "step": 6020 + }, + { + "epoch": 3.8870238863783086, + "grad_norm": 1.3342468971533448, + "learning_rate": 0.00012594157812024348, + "loss": 1.636, + "step": 6021 + }, + { + "epoch": 3.8876694641704326, + "grad_norm": 1.2810501404999457, + "learning_rate": 0.00012587781038561898, + "loss": 1.3639, + "step": 6022 + }, + { + "epoch": 3.8883150419625565, + "grad_norm": 1.454038568357559, + "learning_rate": 0.00012581404712591103, + "loss": 1.6714, + "step": 6023 + }, + { + "epoch": 3.8889606197546804, + "grad_norm": 1.5199321945367223, + "learning_rate": 0.00012575028835294855, + "loss": 1.7004, + "step": 6024 + }, + { + "epoch": 3.8896061975468044, + "grad_norm": 1.6674711319785451, + "learning_rate": 0.0001256865340785593, + "loss": 1.9205, + "step": 6025 + }, + { + "epoch": 3.8902517753389283, + "grad_norm": 1.575205626413164, + "learning_rate": 0.0001256227843145704, + "loss": 1.7333, + "step": 6026 + }, + { + "epoch": 3.8908973531310522, + "grad_norm": 1.421183224118848, + "learning_rate": 0.00012555903907280814, + "loss": 1.6716, + "step": 6027 + }, + { + "epoch": 3.891542930923176, + "grad_norm": 1.416048131304663, + "learning_rate": 0.00012549529836509784, + "loss": 1.7044, + "step": 6028 + }, + { + "epoch": 3.8921885087153, + "grad_norm": 1.7322839957765384, + "learning_rate": 0.00012543156220326415, + "loss": 1.932, + "step": 6029 + }, + { + "epoch": 3.892834086507424, + "grad_norm": 1.5040140216858215, + "learning_rate": 0.00012536783059913077, + "loss": 1.8366, + "step": 6030 + }, + { + "epoch": 3.893479664299548, + "grad_norm": 1.2917693484098105, + "learning_rate": 0.0001253041035645205, + "loss": 1.5287, + "step": 6031 + }, + { + "epoch": 3.894125242091672, + "grad_norm": 1.361486043369014, + "learning_rate": 0.00012524038111125544, + "loss": 1.7146, + "step": 6032 + }, + { + "epoch": 3.894770819883796, + "grad_norm": 1.3967190465498789, + "learning_rate": 0.0001251766632511568, + "loss": 1.6685, + "step": 6033 + }, + { + "epoch": 3.8954163976759197, + "grad_norm": 1.4482197581297105, + "learning_rate": 0.00012511294999604478, + "loss": 1.5511, + "step": 6034 + }, + { + "epoch": 3.8960619754680437, + "grad_norm": 1.4651421313851856, + "learning_rate": 0.00012504924135773893, + "loss": 1.6302, + "step": 6035 + }, + { + "epoch": 3.8967075532601676, + "grad_norm": 1.3782771636489926, + "learning_rate": 0.00012498553734805788, + "loss": 1.5969, + "step": 6036 + }, + { + "epoch": 3.8973531310522915, + "grad_norm": 1.5582001573467874, + "learning_rate": 0.0001249218379788194, + "loss": 1.7095, + "step": 6037 + }, + { + "epoch": 3.897998708844416, + "grad_norm": 1.4139308667004358, + "learning_rate": 0.00012485814326184026, + "loss": 1.712, + "step": 6038 + }, + { + "epoch": 3.89864428663654, + "grad_norm": 1.6429938243219098, + "learning_rate": 0.00012479445320893665, + "loss": 1.8457, + "step": 6039 + }, + { + "epoch": 3.8992898644286638, + "grad_norm": 1.5994716261896063, + "learning_rate": 0.00012473076783192367, + "loss": 1.7629, + "step": 6040 + }, + { + "epoch": 3.8999354422207877, + "grad_norm": 1.5516269924649124, + "learning_rate": 0.00012466708714261557, + "loss": 1.8781, + "step": 6041 + }, + { + "epoch": 3.9005810200129116, + "grad_norm": 1.5878736963111246, + "learning_rate": 0.00012460341115282586, + "loss": 1.9537, + "step": 6042 + }, + { + "epoch": 3.9012265978050356, + "grad_norm": 1.5655970364400003, + "learning_rate": 0.00012453973987436705, 
+ "loss": 1.7829, + "step": 6043 + }, + { + "epoch": 3.9018721755971595, + "grad_norm": 1.2700447057507591, + "learning_rate": 0.0001244760733190508, + "loss": 1.5818, + "step": 6044 + }, + { + "epoch": 3.9025177533892834, + "grad_norm": 1.5603590719714406, + "learning_rate": 0.00012441241149868795, + "loss": 2.0183, + "step": 6045 + }, + { + "epoch": 3.9031633311814073, + "grad_norm": 1.3839627821684322, + "learning_rate": 0.00012434875442508846, + "loss": 1.4497, + "step": 6046 + }, + { + "epoch": 3.9038089089735313, + "grad_norm": 1.2866262698791202, + "learning_rate": 0.00012428510211006137, + "loss": 1.4031, + "step": 6047 + }, + { + "epoch": 3.904454486765655, + "grad_norm": 1.5098772955073216, + "learning_rate": 0.00012422145456541476, + "loss": 1.6149, + "step": 6048 + }, + { + "epoch": 3.905100064557779, + "grad_norm": 1.4251440058412106, + "learning_rate": 0.00012415781180295603, + "loss": 1.5057, + "step": 6049 + }, + { + "epoch": 3.905745642349903, + "grad_norm": 1.4812556815527869, + "learning_rate": 0.00012409417383449147, + "loss": 1.7208, + "step": 6050 + }, + { + "epoch": 3.906391220142027, + "grad_norm": 1.4416696656291204, + "learning_rate": 0.00012403054067182664, + "loss": 1.6762, + "step": 6051 + }, + { + "epoch": 3.907036797934151, + "grad_norm": 1.5132380213307994, + "learning_rate": 0.00012396691232676617, + "loss": 1.7104, + "step": 6052 + }, + { + "epoch": 3.907682375726275, + "grad_norm": 1.4163798782207617, + "learning_rate": 0.0001239032888111137, + "loss": 1.6362, + "step": 6053 + }, + { + "epoch": 3.9083279535183992, + "grad_norm": 1.2643506467519616, + "learning_rate": 0.0001238396701366721, + "loss": 1.393, + "step": 6054 + }, + { + "epoch": 3.908973531310523, + "grad_norm": 1.3501909910714864, + "learning_rate": 0.00012377605631524338, + "loss": 1.4762, + "step": 6055 + }, + { + "epoch": 3.909619109102647, + "grad_norm": 1.4044146419916623, + "learning_rate": 0.0001237124473586284, + "loss": 1.3373, + "step": 6056 + }, + { + "epoch": 3.910264686894771, + "grad_norm": 1.2119066844621138, + "learning_rate": 0.0001236488432786274, + "loss": 1.4375, + "step": 6057 + }, + { + "epoch": 3.910910264686895, + "grad_norm": 1.4734424342415444, + "learning_rate": 0.0001235852440870396, + "loss": 1.6204, + "step": 6058 + }, + { + "epoch": 3.911555842479019, + "grad_norm": 1.8388104789746755, + "learning_rate": 0.00012352164979566318, + "loss": 1.8949, + "step": 6059 + }, + { + "epoch": 3.912201420271143, + "grad_norm": 1.475823606612044, + "learning_rate": 0.00012345806041629565, + "loss": 1.6399, + "step": 6060 + }, + { + "epoch": 3.9128469980632667, + "grad_norm": 1.4911367614017288, + "learning_rate": 0.0001233944759607335, + "loss": 1.6016, + "step": 6061 + }, + { + "epoch": 3.9134925758553907, + "grad_norm": 1.5118799323787628, + "learning_rate": 0.00012333089644077233, + "loss": 1.8433, + "step": 6062 + }, + { + "epoch": 3.9141381536475146, + "grad_norm": 1.5422720443732352, + "learning_rate": 0.0001232673218682067, + "loss": 1.7281, + "step": 6063 + }, + { + "epoch": 3.9147837314396385, + "grad_norm": 1.2630343389130796, + "learning_rate": 0.00012320375225483045, + "loss": 1.3727, + "step": 6064 + }, + { + "epoch": 3.9154293092317625, + "grad_norm": 1.3679389594867737, + "learning_rate": 0.0001231401876124364, + "loss": 1.4129, + "step": 6065 + }, + { + "epoch": 3.9160748870238864, + "grad_norm": 1.4211872777548327, + "learning_rate": 0.00012307662795281633, + "loss": 1.6745, + "step": 6066 + }, + { + "epoch": 3.9167204648160103, + "grad_norm": 
1.4357898115192003, + "learning_rate": 0.00012301307328776138, + "loss": 1.6061, + "step": 6067 + }, + { + "epoch": 3.9173660426081343, + "grad_norm": 1.3857844591176385, + "learning_rate": 0.00012294952362906152, + "loss": 1.6399, + "step": 6068 + }, + { + "epoch": 3.918011620400258, + "grad_norm": 1.3375783007779383, + "learning_rate": 0.00012288597898850584, + "loss": 1.4722, + "step": 6069 + }, + { + "epoch": 3.918657198192382, + "grad_norm": 1.5626324653207857, + "learning_rate": 0.00012282243937788254, + "loss": 2.0248, + "step": 6070 + }, + { + "epoch": 3.919302775984506, + "grad_norm": 1.3892433200043457, + "learning_rate": 0.00012275890480897898, + "loss": 1.6909, + "step": 6071 + }, + { + "epoch": 3.91994835377663, + "grad_norm": 1.5530417744155864, + "learning_rate": 0.00012269537529358137, + "loss": 1.8191, + "step": 6072 + }, + { + "epoch": 3.920593931568754, + "grad_norm": 1.5436587809535596, + "learning_rate": 0.0001226318508434751, + "loss": 1.8415, + "step": 6073 + }, + { + "epoch": 3.921239509360878, + "grad_norm": 1.3864057926167357, + "learning_rate": 0.00012256833147044472, + "loss": 1.5935, + "step": 6074 + }, + { + "epoch": 3.9218850871530018, + "grad_norm": 1.3814441285467858, + "learning_rate": 0.00012250481718627362, + "loss": 1.5523, + "step": 6075 + }, + { + "epoch": 3.9225306649451257, + "grad_norm": 1.5343448593324478, + "learning_rate": 0.00012244130800274436, + "loss": 1.8081, + "step": 6076 + }, + { + "epoch": 3.9231762427372496, + "grad_norm": 1.5322019888359393, + "learning_rate": 0.00012237780393163866, + "loss": 1.7304, + "step": 6077 + }, + { + "epoch": 3.9238218205293736, + "grad_norm": 1.58397137950306, + "learning_rate": 0.00012231430498473706, + "loss": 1.89, + "step": 6078 + }, + { + "epoch": 3.9244673983214975, + "grad_norm": 1.4911400693176287, + "learning_rate": 0.00012225081117381938, + "loss": 1.5561, + "step": 6079 + }, + { + "epoch": 3.9251129761136214, + "grad_norm": 1.4560860701155527, + "learning_rate": 0.00012218732251066433, + "loss": 1.8205, + "step": 6080 + }, + { + "epoch": 3.925758553905746, + "grad_norm": 1.432454874629627, + "learning_rate": 0.00012212383900704965, + "loss": 1.6381, + "step": 6081 + }, + { + "epoch": 3.9264041316978697, + "grad_norm": 1.237993259916111, + "learning_rate": 0.00012206036067475232, + "loss": 1.4478, + "step": 6082 + }, + { + "epoch": 3.9270497094899937, + "grad_norm": 1.569989110328939, + "learning_rate": 0.00012199688752554817, + "loss": 1.6591, + "step": 6083 + }, + { + "epoch": 3.9276952872821176, + "grad_norm": 1.4642303348426968, + "learning_rate": 0.00012193341957121208, + "loss": 1.7032, + "step": 6084 + }, + { + "epoch": 3.9283408650742415, + "grad_norm": 1.4149689833935515, + "learning_rate": 0.00012186995682351805, + "loss": 1.6401, + "step": 6085 + }, + { + "epoch": 3.9289864428663654, + "grad_norm": 1.3411997177597899, + "learning_rate": 0.00012180649929423912, + "loss": 1.4147, + "step": 6086 + }, + { + "epoch": 3.9296320206584894, + "grad_norm": 1.4635728137310136, + "learning_rate": 0.00012174304699514732, + "loss": 1.6562, + "step": 6087 + }, + { + "epoch": 3.9302775984506133, + "grad_norm": 1.454576468289144, + "learning_rate": 0.0001216795999380136, + "loss": 1.6662, + "step": 6088 + }, + { + "epoch": 3.9309231762427372, + "grad_norm": 1.3539353699529904, + "learning_rate": 0.00012161615813460819, + "loss": 1.4647, + "step": 6089 + }, + { + "epoch": 3.931568754034861, + "grad_norm": 1.7271718632842112, + "learning_rate": 0.00012155272159670015, + "loss": 1.6944, + "step": 6090 + }, 
+ { + "epoch": 3.932214331826985, + "grad_norm": 1.3832927801359411, + "learning_rate": 0.00012148929033605754, + "loss": 1.4139, + "step": 6091 + }, + { + "epoch": 3.932859909619109, + "grad_norm": 1.534530412149735, + "learning_rate": 0.00012142586436444762, + "loss": 1.8597, + "step": 6092 + }, + { + "epoch": 3.933505487411233, + "grad_norm": 1.449323674150749, + "learning_rate": 0.00012136244369363653, + "loss": 1.5417, + "step": 6093 + }, + { + "epoch": 3.934151065203357, + "grad_norm": 1.438590260686065, + "learning_rate": 0.00012129902833538946, + "loss": 1.5936, + "step": 6094 + }, + { + "epoch": 3.934796642995481, + "grad_norm": 1.4624774372175473, + "learning_rate": 0.00012123561830147058, + "loss": 1.7273, + "step": 6095 + }, + { + "epoch": 3.9354422207876048, + "grad_norm": 1.4612398323347722, + "learning_rate": 0.00012117221360364323, + "loss": 1.6848, + "step": 6096 + }, + { + "epoch": 3.936087798579729, + "grad_norm": 1.4135580919960349, + "learning_rate": 0.00012110881425366952, + "loss": 1.5734, + "step": 6097 + }, + { + "epoch": 3.936733376371853, + "grad_norm": 1.2413134979551492, + "learning_rate": 0.0001210454202633107, + "loss": 1.2617, + "step": 6098 + }, + { + "epoch": 3.937378954163977, + "grad_norm": 1.6009198040476322, + "learning_rate": 0.0001209820316443271, + "loss": 1.7791, + "step": 6099 + }, + { + "epoch": 3.938024531956101, + "grad_norm": 1.5550501768378757, + "learning_rate": 0.00012091864840847785, + "loss": 1.7935, + "step": 6100 + }, + { + "epoch": 3.938670109748225, + "grad_norm": 1.389267731129483, + "learning_rate": 0.00012085527056752126, + "loss": 1.5254, + "step": 6101 + }, + { + "epoch": 3.9393156875403488, + "grad_norm": 1.479271622883299, + "learning_rate": 0.00012079189813321463, + "loss": 1.6966, + "step": 6102 + }, + { + "epoch": 3.9399612653324727, + "grad_norm": 1.4798376468348369, + "learning_rate": 0.00012072853111731407, + "loss": 1.8217, + "step": 6103 + }, + { + "epoch": 3.9406068431245966, + "grad_norm": 1.2830441952614526, + "learning_rate": 0.00012066516953157493, + "loss": 1.3716, + "step": 6104 + }, + { + "epoch": 3.9412524209167206, + "grad_norm": 1.4034145914837934, + "learning_rate": 0.00012060181338775146, + "loss": 1.7399, + "step": 6105 + }, + { + "epoch": 3.9418979987088445, + "grad_norm": 1.9408754870334393, + "learning_rate": 0.00012053846269759674, + "loss": 1.9148, + "step": 6106 + }, + { + "epoch": 3.9425435765009684, + "grad_norm": 1.508825636313285, + "learning_rate": 0.00012047511747286312, + "loss": 1.6804, + "step": 6107 + }, + { + "epoch": 3.9431891542930924, + "grad_norm": 1.346005902026639, + "learning_rate": 0.00012041177772530179, + "loss": 1.6005, + "step": 6108 + }, + { + "epoch": 3.9438347320852163, + "grad_norm": 1.3038915734052057, + "learning_rate": 0.0001203484434666628, + "loss": 1.5702, + "step": 6109 + }, + { + "epoch": 3.94448030987734, + "grad_norm": 1.3990890649449854, + "learning_rate": 0.00012028511470869543, + "loss": 1.6011, + "step": 6110 + }, + { + "epoch": 3.945125887669464, + "grad_norm": 1.4090447813235614, + "learning_rate": 0.0001202217914631478, + "loss": 1.5263, + "step": 6111 + }, + { + "epoch": 3.945771465461588, + "grad_norm": 1.2601518514579728, + "learning_rate": 0.00012015847374176706, + "loss": 1.3353, + "step": 6112 + }, + { + "epoch": 3.946417043253712, + "grad_norm": 1.3340986044125909, + "learning_rate": 0.00012009516155629923, + "loss": 1.5484, + "step": 6113 + }, + { + "epoch": 3.947062621045836, + "grad_norm": 1.565074135177624, + "learning_rate": 
0.00012003185491848941, + "loss": 1.8243, + "step": 6114 + }, + { + "epoch": 3.94770819883796, + "grad_norm": 1.4360704215441076, + "learning_rate": 0.00011996855384008172, + "loss": 1.6634, + "step": 6115 + }, + { + "epoch": 3.948353776630084, + "grad_norm": 1.3711034932363793, + "learning_rate": 0.00011990525833281899, + "loss": 1.5434, + "step": 6116 + }, + { + "epoch": 3.9489993544222077, + "grad_norm": 1.30914358981421, + "learning_rate": 0.0001198419684084433, + "loss": 1.3559, + "step": 6117 + }, + { + "epoch": 3.9496449322143317, + "grad_norm": 1.4400661488117543, + "learning_rate": 0.00011977868407869566, + "loss": 1.6077, + "step": 6118 + }, + { + "epoch": 3.9502905100064556, + "grad_norm": 1.5975807315856048, + "learning_rate": 0.00011971540535531587, + "loss": 1.8066, + "step": 6119 + }, + { + "epoch": 3.9509360877985795, + "grad_norm": 1.690575254128149, + "learning_rate": 0.00011965213225004277, + "loss": 2.0118, + "step": 6120 + }, + { + "epoch": 3.9515816655907035, + "grad_norm": 1.6352798691960864, + "learning_rate": 0.0001195888647746143, + "loss": 1.8426, + "step": 6121 + }, + { + "epoch": 3.9522272433828274, + "grad_norm": 1.6306816037272065, + "learning_rate": 0.0001195256029407671, + "loss": 1.7489, + "step": 6122 + }, + { + "epoch": 3.9528728211749513, + "grad_norm": 1.463420629419024, + "learning_rate": 0.00011946234676023693, + "loss": 1.7339, + "step": 6123 + }, + { + "epoch": 3.9535183989670757, + "grad_norm": 1.4169437146397705, + "learning_rate": 0.00011939909624475857, + "loss": 1.6449, + "step": 6124 + }, + { + "epoch": 3.9541639767591996, + "grad_norm": 1.429694703835817, + "learning_rate": 0.00011933585140606552, + "loss": 1.6133, + "step": 6125 + }, + { + "epoch": 3.9548095545513235, + "grad_norm": 1.6322697814544502, + "learning_rate": 0.00011927261225589037, + "loss": 1.7591, + "step": 6126 + }, + { + "epoch": 3.9554551323434475, + "grad_norm": 1.697422874506784, + "learning_rate": 0.00011920937880596474, + "loss": 1.6276, + "step": 6127 + }, + { + "epoch": 3.9561007101355714, + "grad_norm": 1.5100207275046025, + "learning_rate": 0.00011914615106801896, + "loss": 1.8836, + "step": 6128 + }, + { + "epoch": 3.9567462879276953, + "grad_norm": 1.6532648367164713, + "learning_rate": 0.00011908292905378254, + "loss": 1.7278, + "step": 6129 + }, + { + "epoch": 3.9573918657198193, + "grad_norm": 1.4727135737126837, + "learning_rate": 0.00011901971277498378, + "loss": 1.7163, + "step": 6130 + }, + { + "epoch": 3.958037443511943, + "grad_norm": 1.6370126860151046, + "learning_rate": 0.00011895650224334991, + "loss": 1.8948, + "step": 6131 + }, + { + "epoch": 3.958683021304067, + "grad_norm": 1.2580665191262599, + "learning_rate": 0.0001188932974706072, + "loss": 1.4109, + "step": 6132 + }, + { + "epoch": 3.959328599096191, + "grad_norm": 1.3640040252436787, + "learning_rate": 0.0001188300984684808, + "loss": 1.6912, + "step": 6133 + }, + { + "epoch": 3.959974176888315, + "grad_norm": 1.3525563721252274, + "learning_rate": 0.00011876690524869471, + "loss": 1.2857, + "step": 6134 + }, + { + "epoch": 3.960619754680439, + "grad_norm": 1.546675780335712, + "learning_rate": 0.00011870371782297195, + "loss": 1.7549, + "step": 6135 + }, + { + "epoch": 3.961265332472563, + "grad_norm": 1.5464143518362523, + "learning_rate": 0.0001186405362030345, + "loss": 1.5044, + "step": 6136 + }, + { + "epoch": 3.961910910264687, + "grad_norm": 1.519001868486667, + "learning_rate": 0.00011857736040060321, + "loss": 1.6663, + "step": 6137 + }, + { + "epoch": 3.9625564880568107, + 
"grad_norm": 1.4827337171095871, + "learning_rate": 0.00011851419042739775, + "loss": 1.5743, + "step": 6138 + }, + { + "epoch": 3.9632020658489346, + "grad_norm": 1.484871236198715, + "learning_rate": 0.00011845102629513689, + "loss": 1.564, + "step": 6139 + }, + { + "epoch": 3.963847643641059, + "grad_norm": 1.4254649908276407, + "learning_rate": 0.00011838786801553823, + "loss": 1.4169, + "step": 6140 + }, + { + "epoch": 3.964493221433183, + "grad_norm": 1.5032079143883248, + "learning_rate": 0.0001183247156003182, + "loss": 1.7835, + "step": 6141 + }, + { + "epoch": 3.965138799225307, + "grad_norm": 1.7032253783239384, + "learning_rate": 0.00011826156906119233, + "loss": 1.8847, + "step": 6142 + }, + { + "epoch": 3.965784377017431, + "grad_norm": 1.5382067034120965, + "learning_rate": 0.00011819842840987495, + "loss": 1.5065, + "step": 6143 + }, + { + "epoch": 3.9664299548095547, + "grad_norm": 1.500029954351139, + "learning_rate": 0.00011813529365807925, + "loss": 1.7274, + "step": 6144 + }, + { + "epoch": 3.9670755326016787, + "grad_norm": 1.4804886512580635, + "learning_rate": 0.00011807216481751739, + "loss": 1.6632, + "step": 6145 + }, + { + "epoch": 3.9677211103938026, + "grad_norm": 1.4643875277199672, + "learning_rate": 0.00011800904189990049, + "loss": 1.6922, + "step": 6146 + }, + { + "epoch": 3.9683666881859265, + "grad_norm": 1.4316979982780242, + "learning_rate": 0.00011794592491693845, + "loss": 1.6199, + "step": 6147 + }, + { + "epoch": 3.9690122659780505, + "grad_norm": 1.2294703861850655, + "learning_rate": 0.00011788281388034008, + "loss": 1.349, + "step": 6148 + }, + { + "epoch": 3.9696578437701744, + "grad_norm": 1.4647875901190328, + "learning_rate": 0.00011781970880181327, + "loss": 1.6912, + "step": 6149 + }, + { + "epoch": 3.9703034215622983, + "grad_norm": 1.4605951035510778, + "learning_rate": 0.00011775660969306455, + "loss": 1.6784, + "step": 6150 + }, + { + "epoch": 3.9709489993544222, + "grad_norm": 1.5823164583322114, + "learning_rate": 0.00011769351656579947, + "loss": 1.7619, + "step": 6151 + }, + { + "epoch": 3.971594577146546, + "grad_norm": 1.4966537208223367, + "learning_rate": 0.00011763042943172256, + "loss": 1.6418, + "step": 6152 + }, + { + "epoch": 3.97224015493867, + "grad_norm": 1.428103481257441, + "learning_rate": 0.000117567348302537, + "loss": 1.6417, + "step": 6153 + }, + { + "epoch": 3.972885732730794, + "grad_norm": 1.455717023087332, + "learning_rate": 0.00011750427318994506, + "loss": 1.4592, + "step": 6154 + }, + { + "epoch": 3.973531310522918, + "grad_norm": 1.273723318068575, + "learning_rate": 0.00011744120410564789, + "loss": 1.4198, + "step": 6155 + }, + { + "epoch": 3.974176888315042, + "grad_norm": 1.5573817607835383, + "learning_rate": 0.00011737814106134532, + "loss": 1.5825, + "step": 6156 + }, + { + "epoch": 3.974822466107166, + "grad_norm": 2.0790425076639574, + "learning_rate": 0.00011731508406873632, + "loss": 1.7147, + "step": 6157 + }, + { + "epoch": 3.9754680438992898, + "grad_norm": 1.4495449155601976, + "learning_rate": 0.00011725203313951859, + "loss": 1.6412, + "step": 6158 + }, + { + "epoch": 3.9761136216914137, + "grad_norm": 1.4152662294933287, + "learning_rate": 0.00011718898828538867, + "loss": 1.491, + "step": 6159 + }, + { + "epoch": 3.9767591994835376, + "grad_norm": 1.4337598875556854, + "learning_rate": 0.00011712594951804207, + "loss": 1.5264, + "step": 6160 + }, + { + "epoch": 3.9774047772756616, + "grad_norm": 1.6212194721581852, + "learning_rate": 0.00011706291684917319, + "loss": 1.9331, + "step": 
6161 + }, + { + "epoch": 3.9780503550677855, + "grad_norm": 1.4449366512897415, + "learning_rate": 0.00011699989029047521, + "loss": 1.6458, + "step": 6162 + }, + { + "epoch": 3.9786959328599094, + "grad_norm": 1.3169595444022457, + "learning_rate": 0.00011693686985364016, + "loss": 1.3545, + "step": 6163 + }, + { + "epoch": 3.9793415106520333, + "grad_norm": 1.4566935043132356, + "learning_rate": 0.00011687385555035905, + "loss": 1.5829, + "step": 6164 + }, + { + "epoch": 3.9799870884441573, + "grad_norm": 1.585911523542346, + "learning_rate": 0.00011681084739232169, + "loss": 1.6535, + "step": 6165 + }, + { + "epoch": 3.980632666236281, + "grad_norm": 1.4565055696538607, + "learning_rate": 0.00011674784539121668, + "loss": 1.5568, + "step": 6166 + }, + { + "epoch": 3.9812782440284056, + "grad_norm": 1.5613782332664312, + "learning_rate": 0.00011668484955873159, + "loss": 1.548, + "step": 6167 + }, + { + "epoch": 3.9819238218205295, + "grad_norm": 1.5386424450638727, + "learning_rate": 0.00011662185990655284, + "loss": 1.6884, + "step": 6168 + }, + { + "epoch": 3.9825693996126534, + "grad_norm": 1.6494614063741022, + "learning_rate": 0.0001165588764463656, + "loss": 1.8998, + "step": 6169 + }, + { + "epoch": 3.9832149774047774, + "grad_norm": 1.3957135624808505, + "learning_rate": 0.00011649589918985395, + "loss": 1.5503, + "step": 6170 + }, + { + "epoch": 3.9838605551969013, + "grad_norm": 1.7351321441711969, + "learning_rate": 0.00011643292814870092, + "loss": 1.8171, + "step": 6171 + }, + { + "epoch": 3.9845061329890252, + "grad_norm": 1.5130812111276077, + "learning_rate": 0.00011636996333458819, + "loss": 1.6488, + "step": 6172 + }, + { + "epoch": 3.985151710781149, + "grad_norm": 1.416206648991376, + "learning_rate": 0.00011630700475919638, + "loss": 1.502, + "step": 6173 + }, + { + "epoch": 3.985797288573273, + "grad_norm": 1.390931389464636, + "learning_rate": 0.00011624405243420508, + "loss": 1.4125, + "step": 6174 + }, + { + "epoch": 3.986442866365397, + "grad_norm": 1.4646352940228258, + "learning_rate": 0.0001161811063712925, + "loss": 1.5939, + "step": 6175 + }, + { + "epoch": 3.987088444157521, + "grad_norm": 1.4131514805813596, + "learning_rate": 0.00011611816658213577, + "loss": 1.6682, + "step": 6176 + }, + { + "epoch": 3.987734021949645, + "grad_norm": 1.4865266635614682, + "learning_rate": 0.00011605523307841097, + "loss": 1.6768, + "step": 6177 + }, + { + "epoch": 3.988379599741769, + "grad_norm": 1.5471607679789678, + "learning_rate": 0.00011599230587179282, + "loss": 1.734, + "step": 6178 + }, + { + "epoch": 3.9890251775338927, + "grad_norm": 1.5321484572281923, + "learning_rate": 0.00011592938497395503, + "loss": 1.8749, + "step": 6179 + }, + { + "epoch": 3.9896707553260167, + "grad_norm": 1.480919287712151, + "learning_rate": 0.00011586647039657009, + "loss": 1.5482, + "step": 6180 + }, + { + "epoch": 3.9903163331181406, + "grad_norm": 1.482270657429344, + "learning_rate": 0.00011580356215130922, + "loss": 1.7178, + "step": 6181 + }, + { + "epoch": 3.9909619109102645, + "grad_norm": 1.433450914606397, + "learning_rate": 0.00011574066024984265, + "loss": 1.7805, + "step": 6182 + }, + { + "epoch": 3.991607488702389, + "grad_norm": 1.3903490930314482, + "learning_rate": 0.00011567776470383932, + "loss": 1.5299, + "step": 6183 + }, + { + "epoch": 3.992253066494513, + "grad_norm": 1.2408834539499867, + "learning_rate": 0.00011561487552496693, + "loss": 1.4119, + "step": 6184 + }, + { + "epoch": 3.9928986442866368, + "grad_norm": 1.3085128119247011, + "learning_rate": 
0.00011555199272489214, + "loss": 1.6107, + "step": 6185 + }, + { + "epoch": 3.9935442220787607, + "grad_norm": 1.559664392521703, + "learning_rate": 0.00011548911631528036, + "loss": 1.774, + "step": 6186 + }, + { + "epoch": 3.9941897998708846, + "grad_norm": 1.4080231087042843, + "learning_rate": 0.00011542624630779587, + "loss": 1.5752, + "step": 6187 + }, + { + "epoch": 3.9948353776630086, + "grad_norm": 1.3108127436270622, + "learning_rate": 0.00011536338271410157, + "loss": 1.4986, + "step": 6188 + }, + { + "epoch": 3.9954809554551325, + "grad_norm": 1.4744823139544199, + "learning_rate": 0.00011530052554585943, + "loss": 1.5364, + "step": 6189 + }, + { + "epoch": 3.9961265332472564, + "grad_norm": 1.483870483339576, + "learning_rate": 0.00011523767481473009, + "loss": 1.6159, + "step": 6190 + }, + { + "epoch": 3.9967721110393803, + "grad_norm": 1.558314402085897, + "learning_rate": 0.00011517483053237293, + "loss": 1.7725, + "step": 6191 + }, + { + "epoch": 3.9974176888315043, + "grad_norm": 1.3301801650944354, + "learning_rate": 0.00011511199271044627, + "loss": 1.4811, + "step": 6192 + }, + { + "epoch": 3.998063266623628, + "grad_norm": 1.4409023000207535, + "learning_rate": 0.00011504916136060725, + "loss": 1.507, + "step": 6193 + }, + { + "epoch": 3.998708844415752, + "grad_norm": 1.448292631424544, + "learning_rate": 0.00011498633649451164, + "loss": 1.5025, + "step": 6194 + }, + { + "epoch": 3.999354422207876, + "grad_norm": 1.5385157523643695, + "learning_rate": 0.00011492351812381411, + "loss": 1.7551, + "step": 6195 + }, + { + "epoch": 4.0, + "grad_norm": 1.3680816061454626, + "learning_rate": 0.00011486070626016821, + "loss": 1.5148, + "step": 6196 + }, + { + "epoch": 4.0, + "eval_loss": 2.092881679534912, + "eval_runtime": 58.42, + "eval_samples_per_second": 5.94, + "eval_steps_per_second": 5.94, + "step": 6196 + }, + { + "epoch": 4.000645577792124, + "grad_norm": 1.4121153806176867, + "learning_rate": 0.00011479790091522611, + "loss": 1.342, + "step": 6197 + }, + { + "epoch": 4.001291155584248, + "grad_norm": 1.1572261476996484, + "learning_rate": 0.00011473510210063882, + "loss": 1.0177, + "step": 6198 + }, + { + "epoch": 4.001936733376372, + "grad_norm": 1.237143716648574, + "learning_rate": 0.0001146723098280563, + "loss": 1.0235, + "step": 6199 + }, + { + "epoch": 4.002582311168496, + "grad_norm": 1.1924541840003768, + "learning_rate": 0.00011460952410912702, + "loss": 1.1009, + "step": 6200 + }, + { + "epoch": 4.00322788896062, + "grad_norm": 1.2699574218519656, + "learning_rate": 0.00011454674495549849, + "loss": 1.2063, + "step": 6201 + }, + { + "epoch": 4.003873466752744, + "grad_norm": 1.152874984878857, + "learning_rate": 0.0001144839723788169, + "loss": 0.8987, + "step": 6202 + }, + { + "epoch": 4.0045190445448675, + "grad_norm": 1.2910581406771091, + "learning_rate": 0.00011442120639072712, + "loss": 1.1118, + "step": 6203 + }, + { + "epoch": 4.005164622336991, + "grad_norm": 1.5029597529289338, + "learning_rate": 0.000114358447002873, + "loss": 1.2288, + "step": 6204 + }, + { + "epoch": 4.005810200129115, + "grad_norm": 1.1660809094687659, + "learning_rate": 0.000114295694226897, + "loss": 0.7722, + "step": 6205 + }, + { + "epoch": 4.006455777921239, + "grad_norm": 2.0995072901546936, + "learning_rate": 0.00011423294807444037, + "loss": 1.1014, + "step": 6206 + }, + { + "epoch": 4.007101355713363, + "grad_norm": 1.410061106223774, + "learning_rate": 0.00011417020855714325, + "loss": 1.0152, + "step": 6207 + }, + { + "epoch": 4.007746933505487, + "grad_norm": 
1.8526430204299082, + "learning_rate": 0.00011410747568664446, + "loss": 1.4298, + "step": 6208 + }, + { + "epoch": 4.008392511297611, + "grad_norm": 1.5191171616113783, + "learning_rate": 0.00011404474947458154, + "loss": 1.0479, + "step": 6209 + }, + { + "epoch": 4.009038089089735, + "grad_norm": 1.7204102423488503, + "learning_rate": 0.00011398202993259088, + "loss": 1.1257, + "step": 6210 + }, + { + "epoch": 4.009683666881859, + "grad_norm": 1.6806070062387761, + "learning_rate": 0.00011391931707230766, + "loss": 1.2516, + "step": 6211 + }, + { + "epoch": 4.010329244673983, + "grad_norm": 1.6305287973992444, + "learning_rate": 0.00011385661090536578, + "loss": 1.1426, + "step": 6212 + }, + { + "epoch": 4.010974822466107, + "grad_norm": 1.5669325165480532, + "learning_rate": 0.00011379391144339774, + "loss": 0.9378, + "step": 6213 + }, + { + "epoch": 4.011620400258231, + "grad_norm": 1.9405598032900113, + "learning_rate": 0.00011373121869803508, + "loss": 1.1505, + "step": 6214 + }, + { + "epoch": 4.012265978050355, + "grad_norm": 2.0069195869066916, + "learning_rate": 0.00011366853268090792, + "loss": 1.2566, + "step": 6215 + }, + { + "epoch": 4.012911555842479, + "grad_norm": 1.6827462879449566, + "learning_rate": 0.00011360585340364512, + "loss": 1.0819, + "step": 6216 + }, + { + "epoch": 4.0135571336346025, + "grad_norm": 1.4850130732654325, + "learning_rate": 0.00011354318087787437, + "loss": 0.9771, + "step": 6217 + }, + { + "epoch": 4.014202711426727, + "grad_norm": 1.682879861662577, + "learning_rate": 0.00011348051511522214, + "loss": 1.1299, + "step": 6218 + }, + { + "epoch": 4.014848289218851, + "grad_norm": 1.6337599970749759, + "learning_rate": 0.00011341785612731348, + "loss": 1.1449, + "step": 6219 + }, + { + "epoch": 4.015493867010975, + "grad_norm": 1.6968191222493867, + "learning_rate": 0.00011335520392577229, + "loss": 1.2461, + "step": 6220 + }, + { + "epoch": 4.016139444803099, + "grad_norm": 1.5180058911278345, + "learning_rate": 0.00011329255852222128, + "loss": 1.1087, + "step": 6221 + }, + { + "epoch": 4.016785022595223, + "grad_norm": 1.6964832054337695, + "learning_rate": 0.00011322991992828178, + "loss": 1.2146, + "step": 6222 + }, + { + "epoch": 4.017430600387347, + "grad_norm": 1.365346278642368, + "learning_rate": 0.00011316728815557385, + "loss": 0.882, + "step": 6223 + }, + { + "epoch": 4.018076178179471, + "grad_norm": 1.7006515017337307, + "learning_rate": 0.00011310466321571643, + "loss": 1.1269, + "step": 6224 + }, + { + "epoch": 4.018721755971595, + "grad_norm": 1.6922873292861633, + "learning_rate": 0.000113042045120327, + "loss": 1.3013, + "step": 6225 + }, + { + "epoch": 4.019367333763719, + "grad_norm": 1.5726627375352933, + "learning_rate": 0.00011297943388102193, + "loss": 1.1801, + "step": 6226 + }, + { + "epoch": 4.020012911555843, + "grad_norm": 1.5890883642191798, + "learning_rate": 0.00011291682950941629, + "loss": 1.0138, + "step": 6227 + }, + { + "epoch": 4.020658489347967, + "grad_norm": 1.6582058817964174, + "learning_rate": 0.00011285423201712371, + "loss": 1.044, + "step": 6228 + }, + { + "epoch": 4.021304067140091, + "grad_norm": 1.8918558023082026, + "learning_rate": 0.00011279164141575678, + "loss": 1.4124, + "step": 6229 + }, + { + "epoch": 4.0219496449322145, + "grad_norm": 1.8627083130463162, + "learning_rate": 0.00011272905771692674, + "loss": 1.1037, + "step": 6230 + }, + { + "epoch": 4.0225952227243384, + "grad_norm": 1.3348692411675533, + "learning_rate": 0.00011266648093224338, + "loss": 0.898, + "step": 6231 + }, + { + 
"epoch": 4.023240800516462, + "grad_norm": 1.693292478367887, + "learning_rate": 0.00011260391107331544, + "loss": 1.2068, + "step": 6232 + }, + { + "epoch": 4.023886378308586, + "grad_norm": 1.626299330132819, + "learning_rate": 0.00011254134815175029, + "loss": 1.2377, + "step": 6233 + }, + { + "epoch": 4.02453195610071, + "grad_norm": 1.422513419982441, + "learning_rate": 0.00011247879217915392, + "loss": 1.0069, + "step": 6234 + }, + { + "epoch": 4.025177533892834, + "grad_norm": 1.630758924668749, + "learning_rate": 0.00011241624316713114, + "loss": 1.1114, + "step": 6235 + }, + { + "epoch": 4.025823111684958, + "grad_norm": 1.889822285222843, + "learning_rate": 0.00011235370112728553, + "loss": 1.3237, + "step": 6236 + }, + { + "epoch": 4.026468689477082, + "grad_norm": 1.5905024767739324, + "learning_rate": 0.00011229116607121924, + "loss": 1.034, + "step": 6237 + }, + { + "epoch": 4.027114267269206, + "grad_norm": 1.4670725031570668, + "learning_rate": 0.00011222863801053306, + "loss": 1.008, + "step": 6238 + }, + { + "epoch": 4.02775984506133, + "grad_norm": 1.5426525148978287, + "learning_rate": 0.00011216611695682673, + "loss": 0.9991, + "step": 6239 + }, + { + "epoch": 4.028405422853454, + "grad_norm": 1.5343610513134736, + "learning_rate": 0.00011210360292169858, + "loss": 1.0785, + "step": 6240 + }, + { + "epoch": 4.029051000645578, + "grad_norm": 1.4668907147318475, + "learning_rate": 0.00011204109591674545, + "loss": 0.9637, + "step": 6241 + }, + { + "epoch": 4.029696578437702, + "grad_norm": 1.463227761558209, + "learning_rate": 0.00011197859595356313, + "loss": 0.9833, + "step": 6242 + }, + { + "epoch": 4.030342156229826, + "grad_norm": 1.816588264837462, + "learning_rate": 0.00011191610304374609, + "loss": 1.196, + "step": 6243 + }, + { + "epoch": 4.0309877340219495, + "grad_norm": 1.610365813771707, + "learning_rate": 0.00011185361719888731, + "loss": 0.8645, + "step": 6244 + }, + { + "epoch": 4.0316333118140735, + "grad_norm": 1.7063873606775635, + "learning_rate": 0.00011179113843057854, + "loss": 1.0856, + "step": 6245 + }, + { + "epoch": 4.032278889606197, + "grad_norm": 1.6421270745121928, + "learning_rate": 0.00011172866675041037, + "loss": 1.1671, + "step": 6246 + }, + { + "epoch": 4.032924467398321, + "grad_norm": 1.5249972602838735, + "learning_rate": 0.00011166620216997186, + "loss": 1.0144, + "step": 6247 + }, + { + "epoch": 4.033570045190445, + "grad_norm": 1.5025889466896651, + "learning_rate": 0.00011160374470085077, + "loss": 0.9805, + "step": 6248 + }, + { + "epoch": 4.034215622982569, + "grad_norm": 1.6968118928322762, + "learning_rate": 0.00011154129435463377, + "loss": 1.0026, + "step": 6249 + }, + { + "epoch": 4.034861200774693, + "grad_norm": 1.7369867219298267, + "learning_rate": 0.00011147885114290592, + "loss": 1.0453, + "step": 6250 + }, + { + "epoch": 4.035506778566817, + "grad_norm": 1.772895684996976, + "learning_rate": 0.00011141641507725113, + "loss": 0.9572, + "step": 6251 + }, + { + "epoch": 4.036152356358941, + "grad_norm": 1.8321657266346547, + "learning_rate": 0.00011135398616925196, + "loss": 1.1344, + "step": 6252 + }, + { + "epoch": 4.036797934151065, + "grad_norm": 1.8380668481347797, + "learning_rate": 0.00011129156443048955, + "loss": 1.1139, + "step": 6253 + }, + { + "epoch": 4.037443511943189, + "grad_norm": 1.8743986758057283, + "learning_rate": 0.00011122914987254383, + "loss": 1.0501, + "step": 6254 + }, + { + "epoch": 4.038089089735313, + "grad_norm": 1.795732436694832, + "learning_rate": 0.0001111667425069934, + "loss": 
1.0504, + "step": 6255 + }, + { + "epoch": 4.038734667527437, + "grad_norm": 1.405558377511246, + "learning_rate": 0.00011110434234541535, + "loss": 0.9509, + "step": 6256 + }, + { + "epoch": 4.039380245319561, + "grad_norm": 1.8518357238787946, + "learning_rate": 0.00011104194939938567, + "loss": 0.9452, + "step": 6257 + }, + { + "epoch": 4.040025823111685, + "grad_norm": 1.9182226430054525, + "learning_rate": 0.00011097956368047886, + "loss": 1.2516, + "step": 6258 + }, + { + "epoch": 4.0406714009038085, + "grad_norm": 1.7586737734518496, + "learning_rate": 0.00011091718520026807, + "loss": 1.2108, + "step": 6259 + }, + { + "epoch": 4.041316978695932, + "grad_norm": 1.3544353011486838, + "learning_rate": 0.00011085481397032519, + "loss": 0.9692, + "step": 6260 + }, + { + "epoch": 4.041962556488057, + "grad_norm": 1.6824273251634552, + "learning_rate": 0.00011079245000222078, + "loss": 1.0047, + "step": 6261 + }, + { + "epoch": 4.042608134280181, + "grad_norm": 1.6909256500980847, + "learning_rate": 0.000110730093307524, + "loss": 1.057, + "step": 6262 + }, + { + "epoch": 4.043253712072305, + "grad_norm": 1.4859744305957567, + "learning_rate": 0.00011066774389780256, + "loss": 1.025, + "step": 6263 + }, + { + "epoch": 4.043899289864429, + "grad_norm": 1.705813472464302, + "learning_rate": 0.00011060540178462302, + "loss": 1.0663, + "step": 6264 + }, + { + "epoch": 4.044544867656553, + "grad_norm": 1.7192939505742393, + "learning_rate": 0.0001105430669795505, + "loss": 1.1943, + "step": 6265 + }, + { + "epoch": 4.045190445448677, + "grad_norm": 1.5760004167499158, + "learning_rate": 0.00011048073949414863, + "loss": 1.0444, + "step": 6266 + }, + { + "epoch": 4.045836023240801, + "grad_norm": 1.559625937076597, + "learning_rate": 0.00011041841933997991, + "loss": 1.0427, + "step": 6267 + }, + { + "epoch": 4.046481601032925, + "grad_norm": 1.6134924103030641, + "learning_rate": 0.00011035610652860542, + "loss": 1.0647, + "step": 6268 + }, + { + "epoch": 4.047127178825049, + "grad_norm": 1.8065834626458006, + "learning_rate": 0.00011029380107158471, + "loss": 1.1119, + "step": 6269 + }, + { + "epoch": 4.047772756617173, + "grad_norm": 1.4925270898927254, + "learning_rate": 0.00011023150298047615, + "loss": 0.9369, + "step": 6270 + }, + { + "epoch": 4.0484183344092965, + "grad_norm": 1.9017125195507938, + "learning_rate": 0.00011016921226683675, + "loss": 1.0672, + "step": 6271 + }, + { + "epoch": 4.0490639122014205, + "grad_norm": 1.8742878248936894, + "learning_rate": 0.00011010692894222194, + "loss": 1.1406, + "step": 6272 + }, + { + "epoch": 4.049709489993544, + "grad_norm": 1.9164495170389608, + "learning_rate": 0.00011004465301818601, + "loss": 1.1938, + "step": 6273 + }, + { + "epoch": 4.050355067785668, + "grad_norm": 2.0845556784894743, + "learning_rate": 0.00010998238450628181, + "loss": 1.3916, + "step": 6274 + }, + { + "epoch": 4.051000645577792, + "grad_norm": 1.7265628064880352, + "learning_rate": 0.00010992012341806072, + "loss": 1.171, + "step": 6275 + }, + { + "epoch": 4.051646223369916, + "grad_norm": 1.5515772826811556, + "learning_rate": 0.00010985786976507289, + "loss": 1.0429, + "step": 6276 + }, + { + "epoch": 4.05229180116204, + "grad_norm": 1.4084985756388237, + "learning_rate": 0.00010979562355886703, + "loss": 0.9131, + "step": 6277 + }, + { + "epoch": 4.052937378954164, + "grad_norm": 1.3759586657142864, + "learning_rate": 0.00010973338481099034, + "loss": 0.9564, + "step": 6278 + }, + { + "epoch": 4.053582956746288, + "grad_norm": 1.6528645586499837, + 
"learning_rate": 0.00010967115353298887, + "loss": 1.1733, + "step": 6279 + }, + { + "epoch": 4.054228534538412, + "grad_norm": 1.6066547658059087, + "learning_rate": 0.00010960892973640716, + "loss": 1.1217, + "step": 6280 + }, + { + "epoch": 4.054874112330536, + "grad_norm": 2.0824533550378543, + "learning_rate": 0.00010954671343278827, + "loss": 1.2439, + "step": 6281 + }, + { + "epoch": 4.05551969012266, + "grad_norm": 1.6634650272127682, + "learning_rate": 0.00010948450463367409, + "loss": 1.1127, + "step": 6282 + }, + { + "epoch": 4.056165267914784, + "grad_norm": 1.4255609682957795, + "learning_rate": 0.00010942230335060494, + "loss": 0.9139, + "step": 6283 + }, + { + "epoch": 4.056810845706908, + "grad_norm": 1.3346356910332453, + "learning_rate": 0.00010936010959511983, + "loss": 0.845, + "step": 6284 + }, + { + "epoch": 4.057456423499032, + "grad_norm": 1.8737332254656855, + "learning_rate": 0.00010929792337875628, + "loss": 1.1947, + "step": 6285 + }, + { + "epoch": 4.0581020012911555, + "grad_norm": 1.8304037063317171, + "learning_rate": 0.00010923574471305058, + "loss": 1.2225, + "step": 6286 + }, + { + "epoch": 4.058747579083279, + "grad_norm": 1.5584089129107528, + "learning_rate": 0.00010917357360953748, + "loss": 0.9297, + "step": 6287 + }, + { + "epoch": 4.059393156875403, + "grad_norm": 1.7113923868641032, + "learning_rate": 0.0001091114100797503, + "loss": 1.0803, + "step": 6288 + }, + { + "epoch": 4.060038734667527, + "grad_norm": 1.5034693121023746, + "learning_rate": 0.00010904925413522109, + "loss": 0.8487, + "step": 6289 + }, + { + "epoch": 4.060684312459651, + "grad_norm": 1.7683492509953438, + "learning_rate": 0.00010898710578748047, + "loss": 1.2075, + "step": 6290 + }, + { + "epoch": 4.061329890251775, + "grad_norm": 2.0157679025324335, + "learning_rate": 0.00010892496504805745, + "loss": 1.2334, + "step": 6291 + }, + { + "epoch": 4.061975468043899, + "grad_norm": 1.7684246820823253, + "learning_rate": 0.00010886283192847987, + "loss": 1.1222, + "step": 6292 + }, + { + "epoch": 4.062621045836023, + "grad_norm": 1.5744541683466011, + "learning_rate": 0.00010880070644027415, + "loss": 1.0658, + "step": 6293 + }, + { + "epoch": 4.063266623628147, + "grad_norm": 1.9819603285967813, + "learning_rate": 0.00010873858859496509, + "loss": 1.0079, + "step": 6294 + }, + { + "epoch": 4.063912201420271, + "grad_norm": 1.4255552162223668, + "learning_rate": 0.00010867647840407623, + "loss": 0.8663, + "step": 6295 + }, + { + "epoch": 4.064557779212395, + "grad_norm": 1.4425311704870545, + "learning_rate": 0.0001086143758791297, + "loss": 0.9685, + "step": 6296 + }, + { + "epoch": 4.065203357004519, + "grad_norm": 1.5093970505960448, + "learning_rate": 0.00010855228103164612, + "loss": 0.9082, + "step": 6297 + }, + { + "epoch": 4.065848934796643, + "grad_norm": 1.4803974096864767, + "learning_rate": 0.00010849019387314467, + "loss": 0.9018, + "step": 6298 + }, + { + "epoch": 4.066494512588767, + "grad_norm": 1.4033373474302189, + "learning_rate": 0.00010842811441514332, + "loss": 0.9681, + "step": 6299 + }, + { + "epoch": 4.0671400903808905, + "grad_norm": 1.6308094599919902, + "learning_rate": 0.0001083660426691583, + "loss": 1.1344, + "step": 6300 + }, + { + "epoch": 4.0677856681730145, + "grad_norm": 2.0175440801172875, + "learning_rate": 0.00010830397864670465, + "loss": 1.2828, + "step": 6301 + }, + { + "epoch": 4.068431245965138, + "grad_norm": 1.6896549912333472, + "learning_rate": 0.00010824192235929591, + "loss": 1.1503, + "step": 6302 + }, + { + "epoch": 
4.069076823757262, + "grad_norm": 1.644311469064541, + "learning_rate": 0.00010817987381844404, + "loss": 0.97, + "step": 6303 + }, + { + "epoch": 4.069722401549387, + "grad_norm": 1.5526992073443324, + "learning_rate": 0.00010811783303565983, + "loss": 1.0085, + "step": 6304 + }, + { + "epoch": 4.070367979341511, + "grad_norm": 1.578121637623944, + "learning_rate": 0.00010805580002245243, + "loss": 1.0051, + "step": 6305 + }, + { + "epoch": 4.071013557133635, + "grad_norm": 1.6890008723180163, + "learning_rate": 0.00010799377479032957, + "loss": 1.0423, + "step": 6306 + }, + { + "epoch": 4.071659134925759, + "grad_norm": 2.4007828063667382, + "learning_rate": 0.00010793175735079762, + "loss": 0.8555, + "step": 6307 + }, + { + "epoch": 4.072304712717883, + "grad_norm": 1.7683455621475141, + "learning_rate": 0.00010786974771536149, + "loss": 1.1801, + "step": 6308 + }, + { + "epoch": 4.072950290510007, + "grad_norm": 2.0442205984817923, + "learning_rate": 0.00010780774589552454, + "loss": 1.2405, + "step": 6309 + }, + { + "epoch": 4.073595868302131, + "grad_norm": 1.8049321288743525, + "learning_rate": 0.00010774575190278876, + "loss": 1.2258, + "step": 6310 + }, + { + "epoch": 4.074241446094255, + "grad_norm": 1.9024896480500733, + "learning_rate": 0.00010768376574865474, + "loss": 1.2018, + "step": 6311 + }, + { + "epoch": 4.074887023886379, + "grad_norm": 1.6673244736794328, + "learning_rate": 0.00010762178744462154, + "loss": 1.0208, + "step": 6312 + }, + { + "epoch": 4.0755326016785025, + "grad_norm": 1.9627262945938313, + "learning_rate": 0.00010755981700218672, + "loss": 1.2265, + "step": 6313 + }, + { + "epoch": 4.076178179470626, + "grad_norm": 1.8323204633624959, + "learning_rate": 0.0001074978544328465, + "loss": 1.1493, + "step": 6314 + }, + { + "epoch": 4.07682375726275, + "grad_norm": 1.4950615635059579, + "learning_rate": 0.00010743589974809557, + "loss": 1.0084, + "step": 6315 + }, + { + "epoch": 4.077469335054874, + "grad_norm": 1.4030198049958305, + "learning_rate": 0.00010737395295942711, + "loss": 0.8037, + "step": 6316 + }, + { + "epoch": 4.078114912846998, + "grad_norm": 1.8330102580378314, + "learning_rate": 0.00010731201407833295, + "loss": 1.1455, + "step": 6317 + }, + { + "epoch": 4.078760490639122, + "grad_norm": 1.784467420008606, + "learning_rate": 0.00010725008311630341, + "loss": 1.1587, + "step": 6318 + }, + { + "epoch": 4.079406068431246, + "grad_norm": 1.7536234909119706, + "learning_rate": 0.00010718816008482729, + "loss": 1.0401, + "step": 6319 + }, + { + "epoch": 4.08005164622337, + "grad_norm": 1.7640237739066045, + "learning_rate": 0.00010712624499539192, + "loss": 1.1846, + "step": 6320 + }, + { + "epoch": 4.080697224015494, + "grad_norm": 1.8061898987148275, + "learning_rate": 0.00010706433785948334, + "loss": 1.1844, + "step": 6321 + }, + { + "epoch": 4.081342801807618, + "grad_norm": 1.5122262891868936, + "learning_rate": 0.0001070024386885858, + "loss": 1.0995, + "step": 6322 + }, + { + "epoch": 4.081988379599742, + "grad_norm": 1.8539550624249657, + "learning_rate": 0.00010694054749418226, + "loss": 1.1346, + "step": 6323 + }, + { + "epoch": 4.082633957391866, + "grad_norm": 1.54582660270599, + "learning_rate": 0.0001068786642877543, + "loss": 1.014, + "step": 6324 + }, + { + "epoch": 4.08327953518399, + "grad_norm": 1.6202355700483022, + "learning_rate": 0.00010681678908078177, + "loss": 1.0315, + "step": 6325 + }, + { + "epoch": 4.083925112976114, + "grad_norm": 1.6890912629918378, + "learning_rate": 0.00010675492188474323, + "loss": 1.1295, + 
"step": 6326 + }, + { + "epoch": 4.0845706907682375, + "grad_norm": 1.434677349239813, + "learning_rate": 0.00010669306271111568, + "loss": 0.8915, + "step": 6327 + }, + { + "epoch": 4.0852162685603615, + "grad_norm": 1.7716921185679175, + "learning_rate": 0.0001066312115713746, + "loss": 1.2476, + "step": 6328 + }, + { + "epoch": 4.085861846352485, + "grad_norm": 1.5938615612050298, + "learning_rate": 0.00010656936847699405, + "loss": 1.0407, + "step": 6329 + }, + { + "epoch": 4.086507424144609, + "grad_norm": 1.5320781289563565, + "learning_rate": 0.00010650753343944662, + "loss": 1.0382, + "step": 6330 + }, + { + "epoch": 4.087153001936733, + "grad_norm": 1.3609805236219152, + "learning_rate": 0.00010644570647020324, + "loss": 0.9454, + "step": 6331 + }, + { + "epoch": 4.087798579728857, + "grad_norm": 1.349873351234952, + "learning_rate": 0.00010638388758073347, + "loss": 0.9106, + "step": 6332 + }, + { + "epoch": 4.088444157520981, + "grad_norm": 1.9126952220554034, + "learning_rate": 0.00010632207678250547, + "loss": 1.2747, + "step": 6333 + }, + { + "epoch": 4.089089735313105, + "grad_norm": 1.7146338901279816, + "learning_rate": 0.00010626027408698568, + "loss": 1.1745, + "step": 6334 + }, + { + "epoch": 4.089735313105229, + "grad_norm": 1.7489036705629923, + "learning_rate": 0.00010619847950563916, + "loss": 1.0848, + "step": 6335 + }, + { + "epoch": 4.090380890897353, + "grad_norm": 1.60096161914722, + "learning_rate": 0.00010613669304992947, + "loss": 1.0525, + "step": 6336 + }, + { + "epoch": 4.091026468689477, + "grad_norm": 1.5664829522532957, + "learning_rate": 0.00010607491473131867, + "loss": 1.0246, + "step": 6337 + }, + { + "epoch": 4.091672046481601, + "grad_norm": 1.417158250200715, + "learning_rate": 0.00010601314456126716, + "loss": 1.019, + "step": 6338 + }, + { + "epoch": 4.092317624273725, + "grad_norm": 1.8294005271275111, + "learning_rate": 0.00010595138255123407, + "loss": 1.1428, + "step": 6339 + }, + { + "epoch": 4.092963202065849, + "grad_norm": 1.6208158454265327, + "learning_rate": 0.00010588962871267687, + "loss": 1.1446, + "step": 6340 + }, + { + "epoch": 4.093608779857973, + "grad_norm": 1.5621312988574567, + "learning_rate": 0.00010582788305705146, + "loss": 0.9613, + "step": 6341 + }, + { + "epoch": 4.0942543576500965, + "grad_norm": 1.6740504761910098, + "learning_rate": 0.00010576614559581235, + "loss": 1.1119, + "step": 6342 + }, + { + "epoch": 4.09489993544222, + "grad_norm": 1.3414087286142022, + "learning_rate": 0.00010570441634041255, + "loss": 0.8119, + "step": 6343 + }, + { + "epoch": 4.095545513234344, + "grad_norm": 1.4828899158431412, + "learning_rate": 0.00010564269530230339, + "loss": 1.0197, + "step": 6344 + }, + { + "epoch": 4.096191091026468, + "grad_norm": 1.5644858454108315, + "learning_rate": 0.00010558098249293475, + "loss": 0.9984, + "step": 6345 + }, + { + "epoch": 4.096836668818592, + "grad_norm": 1.615311487338242, + "learning_rate": 0.0001055192779237551, + "loss": 1.0598, + "step": 6346 + }, + { + "epoch": 4.097482246610717, + "grad_norm": 1.5130498692155836, + "learning_rate": 0.00010545758160621117, + "loss": 0.9083, + "step": 6347 + }, + { + "epoch": 4.098127824402841, + "grad_norm": 1.5151015741868554, + "learning_rate": 0.0001053958935517483, + "loss": 1.0246, + "step": 6348 + }, + { + "epoch": 4.098773402194965, + "grad_norm": 1.637448447988613, + "learning_rate": 0.00010533421377181033, + "loss": 1.1206, + "step": 6349 + }, + { + "epoch": 4.099418979987089, + "grad_norm": 1.9192509992362947, + "learning_rate": 
0.00010527254227783938, + "loss": 1.1866, + "step": 6350 + }, + { + "epoch": 4.100064557779213, + "grad_norm": 1.5084044289057015, + "learning_rate": 0.00010521087908127626, + "loss": 1.0088, + "step": 6351 + }, + { + "epoch": 4.100710135571337, + "grad_norm": 1.333718845275234, + "learning_rate": 0.00010514922419356011, + "loss": 0.8585, + "step": 6352 + }, + { + "epoch": 4.101355713363461, + "grad_norm": 1.7450242805857663, + "learning_rate": 0.00010508757762612845, + "loss": 1.1218, + "step": 6353 + }, + { + "epoch": 4.1020012911555845, + "grad_norm": 1.7046778112894074, + "learning_rate": 0.0001050259393904175, + "loss": 1.0679, + "step": 6354 + }, + { + "epoch": 4.1026468689477085, + "grad_norm": 1.6087461000341001, + "learning_rate": 0.00010496430949786175, + "loss": 0.9675, + "step": 6355 + }, + { + "epoch": 4.103292446739832, + "grad_norm": 1.5220119541022723, + "learning_rate": 0.00010490268795989412, + "loss": 0.9179, + "step": 6356 + }, + { + "epoch": 4.103938024531956, + "grad_norm": 1.7075349995337012, + "learning_rate": 0.00010484107478794608, + "loss": 1.0852, + "step": 6357 + }, + { + "epoch": 4.10458360232408, + "grad_norm": 1.6937395975326839, + "learning_rate": 0.0001047794699934476, + "loss": 0.961, + "step": 6358 + }, + { + "epoch": 4.105229180116204, + "grad_norm": 1.4210090794271515, + "learning_rate": 0.0001047178735878269, + "loss": 0.9683, + "step": 6359 + }, + { + "epoch": 4.105874757908328, + "grad_norm": 1.7741461269217782, + "learning_rate": 0.00010465628558251073, + "loss": 1.1921, + "step": 6360 + }, + { + "epoch": 4.106520335700452, + "grad_norm": 1.953803302864046, + "learning_rate": 0.00010459470598892445, + "loss": 1.1583, + "step": 6361 + }, + { + "epoch": 4.107165913492576, + "grad_norm": 1.9103745161420402, + "learning_rate": 0.00010453313481849161, + "loss": 1.1545, + "step": 6362 + }, + { + "epoch": 4.1078114912847, + "grad_norm": 1.7054049017980004, + "learning_rate": 0.00010447157208263429, + "loss": 1.1025, + "step": 6363 + }, + { + "epoch": 4.108457069076824, + "grad_norm": 1.479588575108063, + "learning_rate": 0.00010441001779277303, + "loss": 0.8945, + "step": 6364 + }, + { + "epoch": 4.109102646868948, + "grad_norm": 1.7360918488006984, + "learning_rate": 0.00010434847196032686, + "loss": 1.1317, + "step": 6365 + }, + { + "epoch": 4.109748224661072, + "grad_norm": 1.5047475656380813, + "learning_rate": 0.00010428693459671306, + "loss": 0.9572, + "step": 6366 + }, + { + "epoch": 4.110393802453196, + "grad_norm": 1.6137402808309138, + "learning_rate": 0.00010422540571334749, + "loss": 1.1269, + "step": 6367 + }, + { + "epoch": 4.11103938024532, + "grad_norm": 1.6108553340460061, + "learning_rate": 0.00010416388532164448, + "loss": 1.0676, + "step": 6368 + }, + { + "epoch": 4.1116849580374435, + "grad_norm": 1.7570880416891885, + "learning_rate": 0.0001041023734330166, + "loss": 1.1436, + "step": 6369 + }, + { + "epoch": 4.112330535829567, + "grad_norm": 1.4970217912664314, + "learning_rate": 0.00010404087005887498, + "loss": 0.8854, + "step": 6370 + }, + { + "epoch": 4.112976113621691, + "grad_norm": 1.5013582462441952, + "learning_rate": 0.00010397937521062916, + "loss": 0.8661, + "step": 6371 + }, + { + "epoch": 4.113621691413815, + "grad_norm": 1.7892815167043852, + "learning_rate": 0.00010391788889968701, + "loss": 1.2521, + "step": 6372 + }, + { + "epoch": 4.114267269205939, + "grad_norm": 1.630452021937754, + "learning_rate": 0.0001038564111374549, + "loss": 1.1063, + "step": 6373 + }, + { + "epoch": 4.114912846998063, + "grad_norm": 
1.7271003790837873, + "learning_rate": 0.00010379494193533763, + "loss": 1.0276, + "step": 6374 + }, + { + "epoch": 4.115558424790187, + "grad_norm": 1.6967835566097356, + "learning_rate": 0.00010373348130473834, + "loss": 1.1154, + "step": 6375 + }, + { + "epoch": 4.116204002582311, + "grad_norm": 1.5573954493650073, + "learning_rate": 0.00010367202925705861, + "loss": 0.9284, + "step": 6376 + }, + { + "epoch": 4.116849580374435, + "grad_norm": 1.8161852101640987, + "learning_rate": 0.00010361058580369852, + "loss": 1.1646, + "step": 6377 + }, + { + "epoch": 4.117495158166559, + "grad_norm": 1.749679617956615, + "learning_rate": 0.00010354915095605632, + "loss": 1.1512, + "step": 6378 + }, + { + "epoch": 4.118140735958683, + "grad_norm": 1.7806686969485968, + "learning_rate": 0.00010348772472552893, + "loss": 1.3125, + "step": 6379 + }, + { + "epoch": 4.118786313750807, + "grad_norm": 1.7316552639277636, + "learning_rate": 0.00010342630712351155, + "loss": 1.0909, + "step": 6380 + }, + { + "epoch": 4.119431891542931, + "grad_norm": 1.694789271607417, + "learning_rate": 0.00010336489816139767, + "loss": 1.1593, + "step": 6381 + }, + { + "epoch": 4.120077469335055, + "grad_norm": 1.4985858725065722, + "learning_rate": 0.00010330349785057935, + "loss": 1.1217, + "step": 6382 + }, + { + "epoch": 4.1207230471271785, + "grad_norm": 1.685193742195559, + "learning_rate": 0.00010324210620244709, + "loss": 1.1608, + "step": 6383 + }, + { + "epoch": 4.1213686249193024, + "grad_norm": 1.5494757169895077, + "learning_rate": 0.00010318072322838956, + "loss": 0.9396, + "step": 6384 + }, + { + "epoch": 4.122014202711426, + "grad_norm": 3.4626960383354417, + "learning_rate": 0.00010311934893979394, + "loss": 1.2094, + "step": 6385 + }, + { + "epoch": 4.12265978050355, + "grad_norm": 1.9512395659031634, + "learning_rate": 0.00010305798334804584, + "loss": 0.9508, + "step": 6386 + }, + { + "epoch": 4.123305358295674, + "grad_norm": 1.4362970982168282, + "learning_rate": 0.00010299662646452925, + "loss": 0.9434, + "step": 6387 + }, + { + "epoch": 4.123950936087798, + "grad_norm": 1.59610772723037, + "learning_rate": 0.00010293527830062639, + "loss": 1.0512, + "step": 6388 + }, + { + "epoch": 4.124596513879922, + "grad_norm": 1.7361146328120503, + "learning_rate": 0.00010287393886771807, + "loss": 1.1682, + "step": 6389 + }, + { + "epoch": 4.125242091672046, + "grad_norm": 1.565328750984822, + "learning_rate": 0.00010281260817718338, + "loss": 1.0212, + "step": 6390 + }, + { + "epoch": 4.125887669464171, + "grad_norm": 1.479973139088652, + "learning_rate": 0.00010275128624039981, + "loss": 0.9651, + "step": 6391 + }, + { + "epoch": 4.126533247256295, + "grad_norm": 1.6742167494420104, + "learning_rate": 0.00010268997306874314, + "loss": 1.1396, + "step": 6392 + }, + { + "epoch": 4.127178825048419, + "grad_norm": 1.7759292776126359, + "learning_rate": 0.00010262866867358773, + "loss": 1.0855, + "step": 6393 + }, + { + "epoch": 4.127824402840543, + "grad_norm": 1.7261688066267222, + "learning_rate": 0.0001025673730663061, + "loss": 1.1892, + "step": 6394 + }, + { + "epoch": 4.128469980632667, + "grad_norm": 1.6503423531587558, + "learning_rate": 0.00010250608625826919, + "loss": 0.9614, + "step": 6395 + }, + { + "epoch": 4.1291155584247905, + "grad_norm": 1.716812515051758, + "learning_rate": 0.00010244480826084645, + "loss": 1.0663, + "step": 6396 + }, + { + "epoch": 4.129761136216914, + "grad_norm": 1.5872271716800033, + "learning_rate": 0.00010238353908540546, + "loss": 0.9419, + "step": 6397 + }, + { + 
"epoch": 4.130406714009038, + "grad_norm": 1.5885461877077562, + "learning_rate": 0.00010232227874331233, + "loss": 1.0166, + "step": 6398 + }, + { + "epoch": 4.131052291801162, + "grad_norm": 1.7456638283964585, + "learning_rate": 0.00010226102724593157, + "loss": 1.0497, + "step": 6399 + }, + { + "epoch": 4.131697869593286, + "grad_norm": 1.6856200031212725, + "learning_rate": 0.00010219978460462582, + "loss": 1.0503, + "step": 6400 + }, + { + "epoch": 4.13234344738541, + "grad_norm": 1.528704171953556, + "learning_rate": 0.00010213855083075636, + "loss": 1.0031, + "step": 6401 + }, + { + "epoch": 4.132989025177534, + "grad_norm": 2.0840234463944602, + "learning_rate": 0.00010207732593568266, + "loss": 1.1147, + "step": 6402 + }, + { + "epoch": 4.133634602969658, + "grad_norm": 1.5207972312679185, + "learning_rate": 0.0001020161099307625, + "loss": 1.0379, + "step": 6403 + }, + { + "epoch": 4.134280180761782, + "grad_norm": 1.8704035521155744, + "learning_rate": 0.00010195490282735216, + "loss": 1.2634, + "step": 6404 + }, + { + "epoch": 4.134925758553906, + "grad_norm": 1.626696994112736, + "learning_rate": 0.0001018937046368062, + "loss": 1.0981, + "step": 6405 + }, + { + "epoch": 4.13557133634603, + "grad_norm": 1.6949782634207413, + "learning_rate": 0.00010183251537047744, + "loss": 1.1926, + "step": 6406 + }, + { + "epoch": 4.136216914138154, + "grad_norm": 1.4732964188805178, + "learning_rate": 0.00010177133503971715, + "loss": 0.9997, + "step": 6407 + }, + { + "epoch": 4.136862491930278, + "grad_norm": 1.4461700235854638, + "learning_rate": 0.00010171016365587503, + "loss": 0.9912, + "step": 6408 + }, + { + "epoch": 4.137508069722402, + "grad_norm": 1.952411231249537, + "learning_rate": 0.00010164900123029888, + "loss": 1.2071, + "step": 6409 + }, + { + "epoch": 4.1381536475145255, + "grad_norm": 2.01197806936017, + "learning_rate": 0.00010158784777433496, + "loss": 1.3099, + "step": 6410 + }, + { + "epoch": 4.1387992253066495, + "grad_norm": 1.7383786795277292, + "learning_rate": 0.00010152670329932794, + "loss": 1.1171, + "step": 6411 + }, + { + "epoch": 4.139444803098773, + "grad_norm": 1.3723861011532106, + "learning_rate": 0.00010146556781662078, + "loss": 1.0053, + "step": 6412 + }, + { + "epoch": 4.140090380890897, + "grad_norm": 1.584433134920285, + "learning_rate": 0.0001014044413375546, + "loss": 1.0163, + "step": 6413 + }, + { + "epoch": 4.140735958683021, + "grad_norm": 1.7621756384613458, + "learning_rate": 0.00010134332387346914, + "loss": 1.2125, + "step": 6414 + }, + { + "epoch": 4.141381536475145, + "grad_norm": 1.7785826720324853, + "learning_rate": 0.00010128221543570231, + "loss": 1.1988, + "step": 6415 + }, + { + "epoch": 4.142027114267269, + "grad_norm": 1.4852402072770334, + "learning_rate": 0.00010122111603559029, + "loss": 0.878, + "step": 6416 + }, + { + "epoch": 4.142672692059393, + "grad_norm": 1.6927064586590095, + "learning_rate": 0.00010116002568446766, + "loss": 1.0105, + "step": 6417 + }, + { + "epoch": 4.143318269851517, + "grad_norm": 1.4818003979146326, + "learning_rate": 0.00010109894439366743, + "loss": 1.0032, + "step": 6418 + }, + { + "epoch": 4.143963847643641, + "grad_norm": 1.6853625371767442, + "learning_rate": 0.00010103787217452068, + "loss": 1.1412, + "step": 6419 + }, + { + "epoch": 4.144609425435765, + "grad_norm": 1.4312626205796273, + "learning_rate": 0.00010097680903835694, + "loss": 0.8744, + "step": 6420 + }, + { + "epoch": 4.145255003227889, + "grad_norm": 1.5264824894015923, + "learning_rate": 0.00010091575499650418, + 
"loss": 1.1792, + "step": 6421 + }, + { + "epoch": 4.145900581020013, + "grad_norm": 1.5422618219535549, + "learning_rate": 0.00010085471006028845, + "loss": 0.9231, + "step": 6422 + }, + { + "epoch": 4.146546158812137, + "grad_norm": 1.5683710195160607, + "learning_rate": 0.00010079367424103423, + "loss": 0.9769, + "step": 6423 + }, + { + "epoch": 4.1471917366042605, + "grad_norm": 1.7185501160839454, + "learning_rate": 0.00010073264755006438, + "loss": 1.1648, + "step": 6424 + }, + { + "epoch": 4.1478373143963845, + "grad_norm": 1.6882313621842544, + "learning_rate": 0.00010067162999869984, + "loss": 0.9485, + "step": 6425 + }, + { + "epoch": 4.148482892188508, + "grad_norm": 1.7567592947579742, + "learning_rate": 0.00010061062159826013, + "loss": 1.0787, + "step": 6426 + }, + { + "epoch": 4.149128469980632, + "grad_norm": 1.8818631837909008, + "learning_rate": 0.00010054962236006292, + "loss": 1.1249, + "step": 6427 + }, + { + "epoch": 4.149774047772756, + "grad_norm": 1.8358947699840553, + "learning_rate": 0.00010048863229542413, + "loss": 1.1545, + "step": 6428 + }, + { + "epoch": 4.15041962556488, + "grad_norm": 1.8348752017462449, + "learning_rate": 0.00010042765141565812, + "loss": 1.288, + "step": 6429 + }, + { + "epoch": 4.151065203357004, + "grad_norm": 1.4369789176204872, + "learning_rate": 0.00010036667973207746, + "loss": 0.971, + "step": 6430 + }, + { + "epoch": 4.151710781149128, + "grad_norm": 1.6047324496955315, + "learning_rate": 0.000100305717255993, + "loss": 1.0061, + "step": 6431 + }, + { + "epoch": 4.152356358941253, + "grad_norm": 1.6450054911148602, + "learning_rate": 0.00010024476399871391, + "loss": 1.0317, + "step": 6432 + }, + { + "epoch": 4.153001936733377, + "grad_norm": 1.6487114164409082, + "learning_rate": 0.00010018381997154776, + "loss": 1.0973, + "step": 6433 + }, + { + "epoch": 4.153647514525501, + "grad_norm": 1.7154237708553601, + "learning_rate": 0.00010012288518580016, + "loss": 1.1984, + "step": 6434 + }, + { + "epoch": 4.154293092317625, + "grad_norm": 1.6485591321484148, + "learning_rate": 0.00010006195965277522, + "loss": 1.139, + "step": 6435 + }, + { + "epoch": 4.154938670109749, + "grad_norm": 1.5476101375227098, + "learning_rate": 0.00010000104338377523, + "loss": 0.9824, + "step": 6436 + }, + { + "epoch": 4.1555842479018725, + "grad_norm": 2.1282282086605084, + "learning_rate": 9.994013639010086e-05, + "loss": 0.9449, + "step": 6437 + }, + { + "epoch": 4.1562298256939965, + "grad_norm": 1.5834388519872733, + "learning_rate": 9.987923868305085e-05, + "loss": 1.1286, + "step": 6438 + }, + { + "epoch": 4.15687540348612, + "grad_norm": 1.5599076701428565, + "learning_rate": 9.981835027392244e-05, + "loss": 1.1155, + "step": 6439 + }, + { + "epoch": 4.157520981278244, + "grad_norm": 1.3289254830578732, + "learning_rate": 9.975747117401114e-05, + "loss": 1.0168, + "step": 6440 + }, + { + "epoch": 4.158166559070368, + "grad_norm": 1.7175196879718846, + "learning_rate": 9.969660139461053e-05, + "loss": 1.0679, + "step": 6441 + }, + { + "epoch": 4.158812136862492, + "grad_norm": 1.5634996160067909, + "learning_rate": 9.963574094701263e-05, + "loss": 1.0184, + "step": 6442 + }, + { + "epoch": 4.159457714654616, + "grad_norm": 1.7361212495092935, + "learning_rate": 9.957488984250772e-05, + "loss": 1.2022, + "step": 6443 + }, + { + "epoch": 4.16010329244674, + "grad_norm": 1.4593349387471914, + "learning_rate": 9.951404809238425e-05, + "loss": 0.8949, + "step": 6444 + }, + { + "epoch": 4.160748870238864, + "grad_norm": 1.6980830245064273, + 
"learning_rate": 9.9453215707929e-05, + "loss": 1.2735, + "step": 6445 + }, + { + "epoch": 4.161394448030988, + "grad_norm": 1.754601060651511, + "learning_rate": 9.939239270042711e-05, + "loss": 1.041, + "step": 6446 + }, + { + "epoch": 4.162040025823112, + "grad_norm": 1.478307775335892, + "learning_rate": 9.933157908116177e-05, + "loss": 0.9772, + "step": 6447 + }, + { + "epoch": 4.162685603615236, + "grad_norm": 1.490525138747878, + "learning_rate": 9.927077486141451e-05, + "loss": 0.9832, + "step": 6448 + }, + { + "epoch": 4.16333118140736, + "grad_norm": 1.4746480711605627, + "learning_rate": 9.92099800524653e-05, + "loss": 0.9159, + "step": 6449 + }, + { + "epoch": 4.163976759199484, + "grad_norm": 1.9151577053193027, + "learning_rate": 9.914919466559203e-05, + "loss": 1.0844, + "step": 6450 + }, + { + "epoch": 4.1646223369916076, + "grad_norm": 1.728537847369111, + "learning_rate": 9.908841871207117e-05, + "loss": 1.0672, + "step": 6451 + }, + { + "epoch": 4.1652679147837315, + "grad_norm": 1.6330670117295156, + "learning_rate": 9.902765220317722e-05, + "loss": 1.0007, + "step": 6452 + }, + { + "epoch": 4.165913492575855, + "grad_norm": 1.8900565982172093, + "learning_rate": 9.896689515018297e-05, + "loss": 0.9937, + "step": 6453 + }, + { + "epoch": 4.166559070367979, + "grad_norm": 1.8064948177921762, + "learning_rate": 9.890614756435953e-05, + "loss": 1.0621, + "step": 6454 + }, + { + "epoch": 4.167204648160103, + "grad_norm": 1.4550061016240265, + "learning_rate": 9.884540945697623e-05, + "loss": 0.8963, + "step": 6455 + }, + { + "epoch": 4.167850225952227, + "grad_norm": 1.7351802643156244, + "learning_rate": 9.878468083930051e-05, + "loss": 1.0246, + "step": 6456 + }, + { + "epoch": 4.168495803744351, + "grad_norm": 1.972815987779011, + "learning_rate": 9.872396172259826e-05, + "loss": 1.2122, + "step": 6457 + }, + { + "epoch": 4.169141381536475, + "grad_norm": 1.5883836547255348, + "learning_rate": 9.866325211813353e-05, + "loss": 0.9578, + "step": 6458 + }, + { + "epoch": 4.169786959328599, + "grad_norm": 1.731619841604884, + "learning_rate": 9.860255203716848e-05, + "loss": 0.9814, + "step": 6459 + }, + { + "epoch": 4.170432537120723, + "grad_norm": 1.7472098795278868, + "learning_rate": 9.854186149096365e-05, + "loss": 1.0419, + "step": 6460 + }, + { + "epoch": 4.171078114912847, + "grad_norm": 1.714655886189496, + "learning_rate": 9.84811804907778e-05, + "loss": 1.1741, + "step": 6461 + }, + { + "epoch": 4.171723692704971, + "grad_norm": 1.6396376121573093, + "learning_rate": 9.84205090478679e-05, + "loss": 0.9703, + "step": 6462 + }, + { + "epoch": 4.172369270497095, + "grad_norm": 1.7329133679786237, + "learning_rate": 9.835984717348902e-05, + "loss": 1.0539, + "step": 6463 + }, + { + "epoch": 4.173014848289219, + "grad_norm": 1.5878419280522251, + "learning_rate": 9.829919487889464e-05, + "loss": 1.0761, + "step": 6464 + }, + { + "epoch": 4.173660426081343, + "grad_norm": 1.4416075716129142, + "learning_rate": 9.823855217533645e-05, + "loss": 0.9952, + "step": 6465 + }, + { + "epoch": 4.1743060038734665, + "grad_norm": 1.4429372453243117, + "learning_rate": 9.81779190740642e-05, + "loss": 0.9118, + "step": 6466 + }, + { + "epoch": 4.17495158166559, + "grad_norm": 1.585735376372378, + "learning_rate": 9.8117295586326e-05, + "loss": 1.0134, + "step": 6467 + }, + { + "epoch": 4.175597159457714, + "grad_norm": 1.717085233796156, + "learning_rate": 9.805668172336818e-05, + "loss": 1.1138, + "step": 6468 + }, + { + "epoch": 4.176242737249838, + "grad_norm": 
1.6069563219488772, + "learning_rate": 9.799607749643516e-05, + "loss": 1.0202, + "step": 6469 + }, + { + "epoch": 4.176888315041962, + "grad_norm": 1.519140287668186, + "learning_rate": 9.793548291676967e-05, + "loss": 0.922, + "step": 6470 + }, + { + "epoch": 4.177533892834086, + "grad_norm": 1.8114658398931889, + "learning_rate": 9.787489799561273e-05, + "loss": 1.0118, + "step": 6471 + }, + { + "epoch": 4.17817947062621, + "grad_norm": 1.5594629942648233, + "learning_rate": 9.781432274420335e-05, + "loss": 0.9971, + "step": 6472 + }, + { + "epoch": 4.178825048418334, + "grad_norm": 1.7186389665322932, + "learning_rate": 9.775375717377887e-05, + "loss": 1.0944, + "step": 6473 + }, + { + "epoch": 4.179470626210458, + "grad_norm": 1.5695751932646935, + "learning_rate": 9.769320129557496e-05, + "loss": 0.999, + "step": 6474 + }, + { + "epoch": 4.180116204002582, + "grad_norm": 1.5745595364826979, + "learning_rate": 9.763265512082523e-05, + "loss": 1.0325, + "step": 6475 + }, + { + "epoch": 4.180761781794706, + "grad_norm": 1.7621221517175025, + "learning_rate": 9.757211866076168e-05, + "loss": 1.1128, + "step": 6476 + }, + { + "epoch": 4.181407359586831, + "grad_norm": 1.883058680016446, + "learning_rate": 9.751159192661449e-05, + "loss": 1.076, + "step": 6477 + }, + { + "epoch": 4.182052937378955, + "grad_norm": 1.3987694877832448, + "learning_rate": 9.74510749296119e-05, + "loss": 0.8056, + "step": 6478 + }, + { + "epoch": 4.1826985151710785, + "grad_norm": 1.6758816143145756, + "learning_rate": 9.739056768098051e-05, + "loss": 1.0771, + "step": 6479 + }, + { + "epoch": 4.183344092963202, + "grad_norm": 1.6158045949366586, + "learning_rate": 9.733007019194508e-05, + "loss": 1.0651, + "step": 6480 + }, + { + "epoch": 4.183989670755326, + "grad_norm": 1.7341253248684607, + "learning_rate": 9.726958247372838e-05, + "loss": 1.0625, + "step": 6481 + }, + { + "epoch": 4.18463524854745, + "grad_norm": 1.8811269274552032, + "learning_rate": 9.720910453755161e-05, + "loss": 1.0913, + "step": 6482 + }, + { + "epoch": 4.185280826339574, + "grad_norm": 1.8086784874415898, + "learning_rate": 9.714863639463413e-05, + "loss": 1.1947, + "step": 6483 + }, + { + "epoch": 4.185926404131698, + "grad_norm": 1.737846249562487, + "learning_rate": 9.70881780561933e-05, + "loss": 1.0736, + "step": 6484 + }, + { + "epoch": 4.186571981923822, + "grad_norm": 1.4925646005249482, + "learning_rate": 9.702772953344473e-05, + "loss": 0.9383, + "step": 6485 + }, + { + "epoch": 4.187217559715946, + "grad_norm": 1.6053698053474825, + "learning_rate": 9.696729083760236e-05, + "loss": 1.0652, + "step": 6486 + }, + { + "epoch": 4.18786313750807, + "grad_norm": 1.8444311150223183, + "learning_rate": 9.690686197987818e-05, + "loss": 1.0714, + "step": 6487 + }, + { + "epoch": 4.188508715300194, + "grad_norm": 1.838519972432188, + "learning_rate": 9.684644297148226e-05, + "loss": 1.044, + "step": 6488 + }, + { + "epoch": 4.189154293092318, + "grad_norm": 1.6384902462423536, + "learning_rate": 9.678603382362305e-05, + "loss": 1.0362, + "step": 6489 + }, + { + "epoch": 4.189799870884442, + "grad_norm": 1.4976339161438634, + "learning_rate": 9.67256345475071e-05, + "loss": 1.0134, + "step": 6490 + }, + { + "epoch": 4.190445448676566, + "grad_norm": 1.509295110351517, + "learning_rate": 9.666524515433905e-05, + "loss": 0.9066, + "step": 6491 + }, + { + "epoch": 4.19109102646869, + "grad_norm": 1.901634154463941, + "learning_rate": 9.660486565532173e-05, + "loss": 1.2434, + "step": 6492 + }, + { + "epoch": 4.1917366042608135, + 
"grad_norm": 1.7802205169163594, + "learning_rate": 9.654449606165626e-05, + "loss": 1.1572, + "step": 6493 + }, + { + "epoch": 4.1923821820529374, + "grad_norm": 1.872961348216379, + "learning_rate": 9.648413638454174e-05, + "loss": 1.1437, + "step": 6494 + }, + { + "epoch": 4.193027759845061, + "grad_norm": 1.4850627976707136, + "learning_rate": 9.642378663517552e-05, + "loss": 1.0379, + "step": 6495 + }, + { + "epoch": 4.193673337637185, + "grad_norm": 1.7453630225911776, + "learning_rate": 9.636344682475318e-05, + "loss": 1.1062, + "step": 6496 + }, + { + "epoch": 4.194318915429309, + "grad_norm": 1.48894862961732, + "learning_rate": 9.630311696446828e-05, + "loss": 0.9123, + "step": 6497 + }, + { + "epoch": 4.194964493221433, + "grad_norm": 1.4468486098514055, + "learning_rate": 9.62427970655127e-05, + "loss": 0.9798, + "step": 6498 + }, + { + "epoch": 4.195610071013557, + "grad_norm": 1.623690018357401, + "learning_rate": 9.618248713907644e-05, + "loss": 0.933, + "step": 6499 + }, + { + "epoch": 4.196255648805681, + "grad_norm": 1.747947915198071, + "learning_rate": 9.61221871963475e-05, + "loss": 1.1795, + "step": 6500 + }, + { + "epoch": 4.196901226597805, + "grad_norm": 1.7751567821097065, + "learning_rate": 9.606189724851226e-05, + "loss": 1.104, + "step": 6501 + }, + { + "epoch": 4.197546804389929, + "grad_norm": 1.7947884547317678, + "learning_rate": 9.600161730675513e-05, + "loss": 1.1724, + "step": 6502 + }, + { + "epoch": 4.198192382182053, + "grad_norm": 1.7433630362681425, + "learning_rate": 9.594134738225855e-05, + "loss": 1.1584, + "step": 6503 + }, + { + "epoch": 4.198837959974177, + "grad_norm": 1.902203944166715, + "learning_rate": 9.588108748620331e-05, + "loss": 1.2467, + "step": 6504 + }, + { + "epoch": 4.199483537766301, + "grad_norm": 1.5921889656752635, + "learning_rate": 9.582083762976828e-05, + "loss": 1.0212, + "step": 6505 + }, + { + "epoch": 4.200129115558425, + "grad_norm": 1.7517412434362873, + "learning_rate": 9.57605978241303e-05, + "loss": 1.0912, + "step": 6506 + }, + { + "epoch": 4.2007746933505485, + "grad_norm": 1.7633372563359024, + "learning_rate": 9.570036808046458e-05, + "loss": 1.0913, + "step": 6507 + }, + { + "epoch": 4.2014202711426725, + "grad_norm": 1.8644669845318107, + "learning_rate": 9.564014840994438e-05, + "loss": 1.0955, + "step": 6508 + }, + { + "epoch": 4.202065848934796, + "grad_norm": 2.215196463049144, + "learning_rate": 9.557993882374103e-05, + "loss": 1.2234, + "step": 6509 + }, + { + "epoch": 4.20271142672692, + "grad_norm": 1.9085354299318678, + "learning_rate": 9.551973933302399e-05, + "loss": 1.3839, + "step": 6510 + }, + { + "epoch": 4.203357004519044, + "grad_norm": 1.708121947378604, + "learning_rate": 9.5459549948961e-05, + "loss": 1.063, + "step": 6511 + }, + { + "epoch": 4.204002582311168, + "grad_norm": 1.5520479919297487, + "learning_rate": 9.539937068271776e-05, + "loss": 0.8537, + "step": 6512 + }, + { + "epoch": 4.204648160103292, + "grad_norm": 1.6378265302596529, + "learning_rate": 9.53392015454581e-05, + "loss": 1.1774, + "step": 6513 + }, + { + "epoch": 4.205293737895416, + "grad_norm": 1.682698196028761, + "learning_rate": 9.527904254834405e-05, + "loss": 1.1457, + "step": 6514 + }, + { + "epoch": 4.20593931568754, + "grad_norm": 1.6341198982662752, + "learning_rate": 9.521889370253583e-05, + "loss": 0.9381, + "step": 6515 + }, + { + "epoch": 4.206584893479664, + "grad_norm": 1.5666122435056593, + "learning_rate": 9.515875501919152e-05, + "loss": 1.0734, + "step": 6516 + }, + { + "epoch": 
4.207230471271788, + "grad_norm": 1.54523191446398, + "learning_rate": 9.509862650946753e-05, + "loss": 0.9881, + "step": 6517 + }, + { + "epoch": 4.207876049063912, + "grad_norm": 1.7283070888973515, + "learning_rate": 9.50385081845184e-05, + "loss": 1.1087, + "step": 6518 + }, + { + "epoch": 4.208521626856037, + "grad_norm": 1.6393251157805901, + "learning_rate": 9.497840005549661e-05, + "loss": 0.9026, + "step": 6519 + }, + { + "epoch": 4.2091672046481605, + "grad_norm": 1.9400895474035156, + "learning_rate": 9.491830213355282e-05, + "loss": 1.1774, + "step": 6520 + }, + { + "epoch": 4.2098127824402845, + "grad_norm": 1.6255878005140847, + "learning_rate": 9.485821442983593e-05, + "loss": 0.9905, + "step": 6521 + }, + { + "epoch": 4.210458360232408, + "grad_norm": 1.4965436014135163, + "learning_rate": 9.47981369554927e-05, + "loss": 1.0379, + "step": 6522 + }, + { + "epoch": 4.211103938024532, + "grad_norm": 1.609085887728637, + "learning_rate": 9.473806972166824e-05, + "loss": 1.0777, + "step": 6523 + }, + { + "epoch": 4.211749515816656, + "grad_norm": 1.7035966266955822, + "learning_rate": 9.467801273950562e-05, + "loss": 1.0137, + "step": 6524 + }, + { + "epoch": 4.21239509360878, + "grad_norm": 1.762571078307789, + "learning_rate": 9.461796602014598e-05, + "loss": 1.0663, + "step": 6525 + }, + { + "epoch": 4.213040671400904, + "grad_norm": 2.056209456695779, + "learning_rate": 9.455792957472864e-05, + "loss": 0.998, + "step": 6526 + }, + { + "epoch": 4.213686249193028, + "grad_norm": 1.817612831408731, + "learning_rate": 9.449790341439104e-05, + "loss": 1.1278, + "step": 6527 + }, + { + "epoch": 4.214331826985152, + "grad_norm": 1.7201083375240098, + "learning_rate": 9.443788755026853e-05, + "loss": 1.0802, + "step": 6528 + }, + { + "epoch": 4.214977404777276, + "grad_norm": 1.8288681957263258, + "learning_rate": 9.437788199349478e-05, + "loss": 1.1774, + "step": 6529 + }, + { + "epoch": 4.2156229825694, + "grad_norm": 1.6409203198218534, + "learning_rate": 9.431788675520144e-05, + "loss": 1.0512, + "step": 6530 + }, + { + "epoch": 4.216268560361524, + "grad_norm": 1.9637426865076788, + "learning_rate": 9.425790184651818e-05, + "loss": 1.2245, + "step": 6531 + }, + { + "epoch": 4.216914138153648, + "grad_norm": 1.9622607107511463, + "learning_rate": 9.419792727857285e-05, + "loss": 1.2281, + "step": 6532 + }, + { + "epoch": 4.217559715945772, + "grad_norm": 1.9513261488407265, + "learning_rate": 9.413796306249142e-05, + "loss": 1.2728, + "step": 6533 + }, + { + "epoch": 4.2182052937378955, + "grad_norm": 1.3796486883693257, + "learning_rate": 9.407800920939781e-05, + "loss": 0.827, + "step": 6534 + }, + { + "epoch": 4.2188508715300195, + "grad_norm": 1.7898493999627452, + "learning_rate": 9.401806573041407e-05, + "loss": 1.1738, + "step": 6535 + }, + { + "epoch": 4.219496449322143, + "grad_norm": 1.581900365575535, + "learning_rate": 9.395813263666039e-05, + "loss": 1.0352, + "step": 6536 + }, + { + "epoch": 4.220142027114267, + "grad_norm": 1.6407049438563146, + "learning_rate": 9.389820993925499e-05, + "loss": 1.1279, + "step": 6537 + }, + { + "epoch": 4.220787604906391, + "grad_norm": 1.539479616839468, + "learning_rate": 9.383829764931407e-05, + "loss": 0.9206, + "step": 6538 + }, + { + "epoch": 4.221433182698515, + "grad_norm": 1.706648735837082, + "learning_rate": 9.377839577795202e-05, + "loss": 1.0848, + "step": 6539 + }, + { + "epoch": 4.222078760490639, + "grad_norm": 1.4377781375279357, + "learning_rate": 9.371850433628131e-05, + "loss": 0.8747, + "step": 6540 + }, + { + 
"epoch": 4.222724338282763, + "grad_norm": 1.744607750391415, + "learning_rate": 9.365862333541237e-05, + "loss": 1.0144, + "step": 6541 + }, + { + "epoch": 4.223369916074887, + "grad_norm": 1.6728929175945373, + "learning_rate": 9.359875278645374e-05, + "loss": 1.0609, + "step": 6542 + }, + { + "epoch": 4.224015493867011, + "grad_norm": 1.5361435266945742, + "learning_rate": 9.353889270051212e-05, + "loss": 0.9685, + "step": 6543 + }, + { + "epoch": 4.224661071659135, + "grad_norm": 1.6628706101384372, + "learning_rate": 9.347904308869207e-05, + "loss": 0.9913, + "step": 6544 + }, + { + "epoch": 4.225306649451259, + "grad_norm": 1.8411966745396753, + "learning_rate": 9.34192039620963e-05, + "loss": 1.118, + "step": 6545 + }, + { + "epoch": 4.225952227243383, + "grad_norm": 1.4936210671254064, + "learning_rate": 9.335937533182572e-05, + "loss": 0.962, + "step": 6546 + }, + { + "epoch": 4.226597805035507, + "grad_norm": 1.6668538438809066, + "learning_rate": 9.329955720897902e-05, + "loss": 1.01, + "step": 6547 + }, + { + "epoch": 4.227243382827631, + "grad_norm": 1.9652567615705503, + "learning_rate": 9.323974960465316e-05, + "loss": 1.355, + "step": 6548 + }, + { + "epoch": 4.2278889606197545, + "grad_norm": 1.6188447362874157, + "learning_rate": 9.31799525299431e-05, + "loss": 0.9691, + "step": 6549 + }, + { + "epoch": 4.228534538411878, + "grad_norm": 1.8087155912715691, + "learning_rate": 9.312016599594173e-05, + "loss": 1.2704, + "step": 6550 + }, + { + "epoch": 4.229180116204002, + "grad_norm": 1.7393636039599552, + "learning_rate": 9.306039001374014e-05, + "loss": 1.0496, + "step": 6551 + }, + { + "epoch": 4.229825693996126, + "grad_norm": 1.8591881501226044, + "learning_rate": 9.30006245944274e-05, + "loss": 1.1581, + "step": 6552 + }, + { + "epoch": 4.23047127178825, + "grad_norm": 1.4813343347940198, + "learning_rate": 9.294086974909053e-05, + "loss": 1.0822, + "step": 6553 + }, + { + "epoch": 4.231116849580374, + "grad_norm": 1.982759990103224, + "learning_rate": 9.288112548881477e-05, + "loss": 1.128, + "step": 6554 + }, + { + "epoch": 4.231762427372498, + "grad_norm": 1.7113057950929027, + "learning_rate": 9.282139182468328e-05, + "loss": 1.0947, + "step": 6555 + }, + { + "epoch": 4.232408005164622, + "grad_norm": 1.7123704166173075, + "learning_rate": 9.276166876777722e-05, + "loss": 0.9612, + "step": 6556 + }, + { + "epoch": 4.233053582956746, + "grad_norm": 1.4440309117910204, + "learning_rate": 9.270195632917586e-05, + "loss": 0.8775, + "step": 6557 + }, + { + "epoch": 4.23369916074887, + "grad_norm": 1.488749612598994, + "learning_rate": 9.264225451995655e-05, + "loss": 0.9246, + "step": 6558 + }, + { + "epoch": 4.234344738540994, + "grad_norm": 1.8679061688330962, + "learning_rate": 9.258256335119453e-05, + "loss": 0.8831, + "step": 6559 + }, + { + "epoch": 4.234990316333118, + "grad_norm": 1.4611179440685267, + "learning_rate": 9.252288283396309e-05, + "loss": 0.8807, + "step": 6560 + }, + { + "epoch": 4.235635894125242, + "grad_norm": 1.9202085305661685, + "learning_rate": 9.246321297933367e-05, + "loss": 1.2725, + "step": 6561 + }, + { + "epoch": 4.236281471917366, + "grad_norm": 1.362099159009515, + "learning_rate": 9.240355379837565e-05, + "loss": 0.8197, + "step": 6562 + }, + { + "epoch": 4.23692704970949, + "grad_norm": 2.3350581593020565, + "learning_rate": 9.234390530215634e-05, + "loss": 1.1464, + "step": 6563 + }, + { + "epoch": 4.237572627501614, + "grad_norm": 1.6960143058597517, + "learning_rate": 9.228426750174119e-05, + "loss": 1.0167, + "step": 6564 + }, 
+ { + "epoch": 4.238218205293738, + "grad_norm": 1.8096962746406062, + "learning_rate": 9.222464040819372e-05, + "loss": 1.0783, + "step": 6565 + }, + { + "epoch": 4.238863783085862, + "grad_norm": 2.233859167276724, + "learning_rate": 9.216502403257527e-05, + "loss": 1.1714, + "step": 6566 + }, + { + "epoch": 4.239509360877986, + "grad_norm": 1.819800542048357, + "learning_rate": 9.21054183859453e-05, + "loss": 0.9392, + "step": 6567 + }, + { + "epoch": 4.24015493867011, + "grad_norm": 1.5613153887513576, + "learning_rate": 9.204582347936135e-05, + "loss": 0.9158, + "step": 6568 + }, + { + "epoch": 4.240800516462234, + "grad_norm": 2.027212831529722, + "learning_rate": 9.198623932387884e-05, + "loss": 1.1359, + "step": 6569 + }, + { + "epoch": 4.241446094254358, + "grad_norm": 1.8383891729092026, + "learning_rate": 9.192666593055122e-05, + "loss": 0.9647, + "step": 6570 + }, + { + "epoch": 4.242091672046482, + "grad_norm": 1.7533306226625922, + "learning_rate": 9.186710331043007e-05, + "loss": 1.1383, + "step": 6571 + }, + { + "epoch": 4.242737249838606, + "grad_norm": 1.616135813249375, + "learning_rate": 9.180755147456477e-05, + "loss": 0.9844, + "step": 6572 + }, + { + "epoch": 4.24338282763073, + "grad_norm": 1.6543307341172195, + "learning_rate": 9.174801043400287e-05, + "loss": 1.0191, + "step": 6573 + }, + { + "epoch": 4.244028405422854, + "grad_norm": 2.1292435972164334, + "learning_rate": 9.168848019978986e-05, + "loss": 1.1725, + "step": 6574 + }, + { + "epoch": 4.244673983214978, + "grad_norm": 1.6282713698983284, + "learning_rate": 9.162896078296913e-05, + "loss": 1.0196, + "step": 6575 + }, + { + "epoch": 4.2453195610071015, + "grad_norm": 1.69206558845249, + "learning_rate": 9.156945219458224e-05, + "loss": 1.1545, + "step": 6576 + }, + { + "epoch": 4.245965138799225, + "grad_norm": 2.132059070180035, + "learning_rate": 9.150995444566865e-05, + "loss": 1.1193, + "step": 6577 + }, + { + "epoch": 4.246610716591349, + "grad_norm": 1.6811781486816928, + "learning_rate": 9.145046754726571e-05, + "loss": 1.1114, + "step": 6578 + }, + { + "epoch": 4.247256294383473, + "grad_norm": 1.8334772065329612, + "learning_rate": 9.139099151040896e-05, + "loss": 1.2879, + "step": 6579 + }, + { + "epoch": 4.247901872175597, + "grad_norm": 1.8444668001979205, + "learning_rate": 9.133152634613181e-05, + "loss": 1.166, + "step": 6580 + }, + { + "epoch": 4.248547449967721, + "grad_norm": 1.7466349837240596, + "learning_rate": 9.12720720654656e-05, + "loss": 1.0735, + "step": 6581 + }, + { + "epoch": 4.249193027759845, + "grad_norm": 1.840737127838793, + "learning_rate": 9.121262867943977e-05, + "loss": 1.0691, + "step": 6582 + }, + { + "epoch": 4.249838605551969, + "grad_norm": 1.5987872870988797, + "learning_rate": 9.11531961990817e-05, + "loss": 0.9406, + "step": 6583 + }, + { + "epoch": 4.250484183344093, + "grad_norm": 1.7372444320596685, + "learning_rate": 9.109377463541671e-05, + "loss": 1.0212, + "step": 6584 + }, + { + "epoch": 4.251129761136217, + "grad_norm": 1.6680136910165329, + "learning_rate": 9.103436399946808e-05, + "loss": 0.9864, + "step": 6585 + }, + { + "epoch": 4.251775338928341, + "grad_norm": 1.553857555438631, + "learning_rate": 9.097496430225717e-05, + "loss": 1.0204, + "step": 6586 + }, + { + "epoch": 4.252420916720465, + "grad_norm": 1.8643143980817218, + "learning_rate": 9.091557555480322e-05, + "loss": 1.2393, + "step": 6587 + }, + { + "epoch": 4.253066494512589, + "grad_norm": 1.4713618750578283, + "learning_rate": 9.08561977681234e-05, + "loss": 0.8997, + "step": 
6588 + }, + { + "epoch": 4.253712072304713, + "grad_norm": 1.8269573796746825, + "learning_rate": 9.079683095323293e-05, + "loss": 1.0669, + "step": 6589 + }, + { + "epoch": 4.2543576500968365, + "grad_norm": 1.8337388926889229, + "learning_rate": 9.073747512114506e-05, + "loss": 1.1251, + "step": 6590 + }, + { + "epoch": 4.2550032278889605, + "grad_norm": 1.7337051439017117, + "learning_rate": 9.067813028287081e-05, + "loss": 0.8934, + "step": 6591 + }, + { + "epoch": 4.255648805681084, + "grad_norm": 1.900397291244637, + "learning_rate": 9.061879644941928e-05, + "loss": 1.167, + "step": 6592 + }, + { + "epoch": 4.256294383473208, + "grad_norm": 1.7362553299989059, + "learning_rate": 9.055947363179758e-05, + "loss": 1.0668, + "step": 6593 + }, + { + "epoch": 4.256939961265332, + "grad_norm": 3.704623321084444, + "learning_rate": 9.050016184101062e-05, + "loss": 0.9395, + "step": 6594 + }, + { + "epoch": 4.257585539057456, + "grad_norm": 1.905976081298466, + "learning_rate": 9.044086108806138e-05, + "loss": 1.1855, + "step": 6595 + }, + { + "epoch": 4.25823111684958, + "grad_norm": 1.5733239476911964, + "learning_rate": 9.038157138395083e-05, + "loss": 0.9758, + "step": 6596 + }, + { + "epoch": 4.258876694641704, + "grad_norm": 1.7154692167894614, + "learning_rate": 9.032229273967772e-05, + "loss": 1.092, + "step": 6597 + }, + { + "epoch": 4.259522272433828, + "grad_norm": 2.212614593033578, + "learning_rate": 9.026302516623892e-05, + "loss": 1.304, + "step": 6598 + }, + { + "epoch": 4.260167850225952, + "grad_norm": 1.68207161753269, + "learning_rate": 9.02037686746292e-05, + "loss": 0.9093, + "step": 6599 + }, + { + "epoch": 4.260813428018076, + "grad_norm": 1.6672614093337044, + "learning_rate": 9.014452327584118e-05, + "loss": 1.0028, + "step": 6600 + }, + { + "epoch": 4.2614590058102, + "grad_norm": 1.6992155982248613, + "learning_rate": 9.008528898086554e-05, + "loss": 1.0813, + "step": 6601 + }, + { + "epoch": 4.262104583602324, + "grad_norm": 1.7974923131064482, + "learning_rate": 9.002606580069089e-05, + "loss": 0.9885, + "step": 6602 + }, + { + "epoch": 4.262750161394448, + "grad_norm": 1.6344212748898703, + "learning_rate": 8.996685374630367e-05, + "loss": 1.1817, + "step": 6603 + }, + { + "epoch": 4.263395739186572, + "grad_norm": 1.592762268653335, + "learning_rate": 8.990765282868834e-05, + "loss": 1.1014, + "step": 6604 + }, + { + "epoch": 4.264041316978696, + "grad_norm": 1.8439109644379164, + "learning_rate": 8.984846305882739e-05, + "loss": 1.1632, + "step": 6605 + }, + { + "epoch": 4.26468689477082, + "grad_norm": 1.8265735054064594, + "learning_rate": 8.978928444770103e-05, + "loss": 1.1685, + "step": 6606 + }, + { + "epoch": 4.265332472562944, + "grad_norm": 2.098585862789347, + "learning_rate": 8.973011700628751e-05, + "loss": 1.2457, + "step": 6607 + }, + { + "epoch": 4.265978050355068, + "grad_norm": 1.5407374841907082, + "learning_rate": 8.96709607455631e-05, + "loss": 0.9141, + "step": 6608 + }, + { + "epoch": 4.266623628147192, + "grad_norm": 1.998018908525648, + "learning_rate": 8.96118156765018e-05, + "loss": 1.3197, + "step": 6609 + }, + { + "epoch": 4.267269205939316, + "grad_norm": 1.8022831371380792, + "learning_rate": 8.955268181007563e-05, + "loss": 1.1329, + "step": 6610 + }, + { + "epoch": 4.26791478373144, + "grad_norm": 1.784000058827254, + "learning_rate": 8.949355915725462e-05, + "loss": 1.0922, + "step": 6611 + }, + { + "epoch": 4.268560361523564, + "grad_norm": 1.7263093423023699, + "learning_rate": 8.943444772900659e-05, + "loss": 1.0412, + 
"step": 6612 + }, + { + "epoch": 4.269205939315688, + "grad_norm": 1.7977985797302403, + "learning_rate": 8.937534753629728e-05, + "loss": 1.207, + "step": 6613 + }, + { + "epoch": 4.269851517107812, + "grad_norm": 1.6694917428061125, + "learning_rate": 8.931625859009045e-05, + "loss": 1.0091, + "step": 6614 + }, + { + "epoch": 4.270497094899936, + "grad_norm": 1.7655566536069973, + "learning_rate": 8.925718090134774e-05, + "loss": 1.1832, + "step": 6615 + }, + { + "epoch": 4.27114267269206, + "grad_norm": 1.9245073070181398, + "learning_rate": 8.91981144810286e-05, + "loss": 1.1843, + "step": 6616 + }, + { + "epoch": 4.2717882504841835, + "grad_norm": 1.6197834570498646, + "learning_rate": 8.913905934009047e-05, + "loss": 1.0492, + "step": 6617 + }, + { + "epoch": 4.2724338282763075, + "grad_norm": 1.5328739761910888, + "learning_rate": 8.908001548948877e-05, + "loss": 0.9801, + "step": 6618 + }, + { + "epoch": 4.273079406068431, + "grad_norm": 1.7446662435457883, + "learning_rate": 8.902098294017667e-05, + "loss": 1.145, + "step": 6619 + }, + { + "epoch": 4.273724983860555, + "grad_norm": 1.6033709595244887, + "learning_rate": 8.896196170310531e-05, + "loss": 1.0762, + "step": 6620 + }, + { + "epoch": 4.274370561652679, + "grad_norm": 1.9768092198922507, + "learning_rate": 8.890295178922385e-05, + "loss": 1.2308, + "step": 6621 + }, + { + "epoch": 4.275016139444803, + "grad_norm": 1.8206690255362326, + "learning_rate": 8.88439532094791e-05, + "loss": 1.1969, + "step": 6622 + }, + { + "epoch": 4.275661717236927, + "grad_norm": 1.6541689452063804, + "learning_rate": 8.8784965974816e-05, + "loss": 1.1493, + "step": 6623 + }, + { + "epoch": 4.276307295029051, + "grad_norm": 1.582919782318567, + "learning_rate": 8.872599009617733e-05, + "loss": 1.0338, + "step": 6624 + }, + { + "epoch": 4.276952872821175, + "grad_norm": 1.6664734430814976, + "learning_rate": 8.866702558450359e-05, + "loss": 1.0547, + "step": 6625 + }, + { + "epoch": 4.277598450613299, + "grad_norm": 1.8485605934341587, + "learning_rate": 8.86080724507334e-05, + "loss": 1.1012, + "step": 6626 + }, + { + "epoch": 4.278244028405423, + "grad_norm": 1.5319046246839747, + "learning_rate": 8.854913070580322e-05, + "loss": 0.8683, + "step": 6627 + }, + { + "epoch": 4.278889606197547, + "grad_norm": 1.8060278784051262, + "learning_rate": 8.849020036064725e-05, + "loss": 1.1622, + "step": 6628 + }, + { + "epoch": 4.279535183989671, + "grad_norm": 2.0464924154040904, + "learning_rate": 8.843128142619773e-05, + "loss": 1.3766, + "step": 6629 + }, + { + "epoch": 4.280180761781795, + "grad_norm": 1.5822560837744566, + "learning_rate": 8.837237391338479e-05, + "loss": 0.9265, + "step": 6630 + }, + { + "epoch": 4.280826339573919, + "grad_norm": 1.7443209589626072, + "learning_rate": 8.83134778331363e-05, + "loss": 0.9554, + "step": 6631 + }, + { + "epoch": 4.2814719173660425, + "grad_norm": 1.6111175499129187, + "learning_rate": 8.825459319637811e-05, + "loss": 1.1072, + "step": 6632 + }, + { + "epoch": 4.282117495158166, + "grad_norm": 2.256082566280961, + "learning_rate": 8.8195720014034e-05, + "loss": 1.1969, + "step": 6633 + }, + { + "epoch": 4.28276307295029, + "grad_norm": 1.6304282030779178, + "learning_rate": 8.813685829702546e-05, + "loss": 1.0982, + "step": 6634 + }, + { + "epoch": 4.283408650742414, + "grad_norm": 1.86845817596216, + "learning_rate": 8.8078008056272e-05, + "loss": 1.1716, + "step": 6635 + }, + { + "epoch": 4.284054228534538, + "grad_norm": 1.6597373672313886, + "learning_rate": 8.801916930269097e-05, + "loss": 
1.0263, + "step": 6636 + }, + { + "epoch": 4.284699806326662, + "grad_norm": 1.8113756027125933, + "learning_rate": 8.796034204719756e-05, + "loss": 1.1177, + "step": 6637 + }, + { + "epoch": 4.285345384118786, + "grad_norm": 1.5803022596303138, + "learning_rate": 8.790152630070478e-05, + "loss": 0.9964, + "step": 6638 + }, + { + "epoch": 4.28599096191091, + "grad_norm": 1.772193804349215, + "learning_rate": 8.784272207412359e-05, + "loss": 1.1314, + "step": 6639 + }, + { + "epoch": 4.286636539703034, + "grad_norm": 1.776508534937, + "learning_rate": 8.778392937836287e-05, + "loss": 1.0904, + "step": 6640 + }, + { + "epoch": 4.287282117495158, + "grad_norm": 1.6404412186425668, + "learning_rate": 8.772514822432916e-05, + "loss": 1.0629, + "step": 6641 + }, + { + "epoch": 4.287927695287282, + "grad_norm": 1.5107536781225421, + "learning_rate": 8.7666378622927e-05, + "loss": 0.9267, + "step": 6642 + }, + { + "epoch": 4.288573273079406, + "grad_norm": 1.717878065585005, + "learning_rate": 8.760762058505882e-05, + "loss": 0.9653, + "step": 6643 + }, + { + "epoch": 4.28921885087153, + "grad_norm": 1.8535173646085972, + "learning_rate": 8.754887412162481e-05, + "loss": 1.0882, + "step": 6644 + }, + { + "epoch": 4.289864428663654, + "grad_norm": 1.7620486069356627, + "learning_rate": 8.749013924352297e-05, + "loss": 0.9779, + "step": 6645 + }, + { + "epoch": 4.2905100064557775, + "grad_norm": 1.635838109009581, + "learning_rate": 8.743141596164943e-05, + "loss": 1.057, + "step": 6646 + }, + { + "epoch": 4.2911555842479014, + "grad_norm": 1.622260787315111, + "learning_rate": 8.737270428689777e-05, + "loss": 0.9509, + "step": 6647 + }, + { + "epoch": 4.291801162040025, + "grad_norm": 1.7039789967272951, + "learning_rate": 8.731400423015962e-05, + "loss": 0.9643, + "step": 6648 + }, + { + "epoch": 4.292446739832149, + "grad_norm": 1.7175454194854713, + "learning_rate": 8.725531580232467e-05, + "loss": 1.1253, + "step": 6649 + }, + { + "epoch": 4.293092317624274, + "grad_norm": 1.8340528377056766, + "learning_rate": 8.719663901427998e-05, + "loss": 1.1684, + "step": 6650 + }, + { + "epoch": 4.293737895416398, + "grad_norm": 1.6623663707789402, + "learning_rate": 8.713797387691079e-05, + "loss": 1.0693, + "step": 6651 + }, + { + "epoch": 4.294383473208522, + "grad_norm": 1.9147843571547247, + "learning_rate": 8.70793204011002e-05, + "loss": 1.2086, + "step": 6652 + }, + { + "epoch": 4.295029051000646, + "grad_norm": 1.7566351421015858, + "learning_rate": 8.702067859772888e-05, + "loss": 1.0269, + "step": 6653 + }, + { + "epoch": 4.29567462879277, + "grad_norm": 1.8159603391495518, + "learning_rate": 8.69620484776755e-05, + "loss": 1.0884, + "step": 6654 + }, + { + "epoch": 4.296320206584894, + "grad_norm": 1.7021674378891012, + "learning_rate": 8.690343005181674e-05, + "loss": 1.0271, + "step": 6655 + }, + { + "epoch": 4.296965784377018, + "grad_norm": 1.660132938756763, + "learning_rate": 8.684482333102667e-05, + "loss": 0.956, + "step": 6656 + }, + { + "epoch": 4.297611362169142, + "grad_norm": 1.783375100970934, + "learning_rate": 8.678622832617766e-05, + "loss": 1.0848, + "step": 6657 + }, + { + "epoch": 4.298256939961266, + "grad_norm": 1.6377111630758105, + "learning_rate": 8.672764504813964e-05, + "loss": 1.0476, + "step": 6658 + }, + { + "epoch": 4.2989025177533895, + "grad_norm": 1.7871932295320256, + "learning_rate": 8.66690735077803e-05, + "loss": 1.0807, + "step": 6659 + }, + { + "epoch": 4.299548095545513, + "grad_norm": 1.6002287771181782, + "learning_rate": 8.661051371596538e-05, + 
"loss": 1.0186, + "step": 6660 + }, + { + "epoch": 4.300193673337637, + "grad_norm": 1.7646248393072481, + "learning_rate": 8.655196568355831e-05, + "loss": 0.9917, + "step": 6661 + }, + { + "epoch": 4.300839251129761, + "grad_norm": 1.6052816008501845, + "learning_rate": 8.649342942142037e-05, + "loss": 0.9904, + "step": 6662 + }, + { + "epoch": 4.301484828921885, + "grad_norm": 1.564937248595098, + "learning_rate": 8.64349049404106e-05, + "loss": 0.958, + "step": 6663 + }, + { + "epoch": 4.302130406714009, + "grad_norm": 1.8359593412099846, + "learning_rate": 8.637639225138595e-05, + "loss": 1.0976, + "step": 6664 + }, + { + "epoch": 4.302775984506133, + "grad_norm": 1.7237345586115405, + "learning_rate": 8.631789136520107e-05, + "loss": 1.0246, + "step": 6665 + }, + { + "epoch": 4.303421562298257, + "grad_norm": 1.8857618408657009, + "learning_rate": 8.625940229270856e-05, + "loss": 1.0902, + "step": 6666 + }, + { + "epoch": 4.304067140090381, + "grad_norm": 1.6234408465802639, + "learning_rate": 8.620092504475868e-05, + "loss": 0.9106, + "step": 6667 + }, + { + "epoch": 4.304712717882505, + "grad_norm": 1.6762992996075263, + "learning_rate": 8.614245963219957e-05, + "loss": 1.127, + "step": 6668 + }, + { + "epoch": 4.305358295674629, + "grad_norm": 1.678597595327169, + "learning_rate": 8.608400606587724e-05, + "loss": 1.0878, + "step": 6669 + }, + { + "epoch": 4.306003873466753, + "grad_norm": 1.8535857674753473, + "learning_rate": 8.602556435663531e-05, + "loss": 1.033, + "step": 6670 + }, + { + "epoch": 4.306649451258877, + "grad_norm": 1.444319760624133, + "learning_rate": 8.596713451531551e-05, + "loss": 0.8544, + "step": 6671 + }, + { + "epoch": 4.307295029051001, + "grad_norm": 1.813214012688657, + "learning_rate": 8.590871655275704e-05, + "loss": 0.9722, + "step": 6672 + }, + { + "epoch": 4.3079406068431245, + "grad_norm": 2.2173334061012904, + "learning_rate": 8.5850310479797e-05, + "loss": 0.9538, + "step": 6673 + }, + { + "epoch": 4.3085861846352485, + "grad_norm": 1.890019743789305, + "learning_rate": 8.57919163072705e-05, + "loss": 1.2053, + "step": 6674 + }, + { + "epoch": 4.309231762427372, + "grad_norm": 2.0372636036580296, + "learning_rate": 8.573353404601013e-05, + "loss": 1.2041, + "step": 6675 + }, + { + "epoch": 4.309877340219496, + "grad_norm": 1.8073270701381197, + "learning_rate": 8.567516370684636e-05, + "loss": 1.1259, + "step": 6676 + }, + { + "epoch": 4.31052291801162, + "grad_norm": 1.9138238600775201, + "learning_rate": 8.56168053006077e-05, + "loss": 1.2749, + "step": 6677 + }, + { + "epoch": 4.311168495803744, + "grad_norm": 1.9763939207748875, + "learning_rate": 8.555845883812003e-05, + "loss": 1.0677, + "step": 6678 + }, + { + "epoch": 4.311814073595868, + "grad_norm": 1.8936439417619995, + "learning_rate": 8.550012433020727e-05, + "loss": 1.0876, + "step": 6679 + }, + { + "epoch": 4.312459651387992, + "grad_norm": 1.8594429847197207, + "learning_rate": 8.544180178769121e-05, + "loss": 1.174, + "step": 6680 + }, + { + "epoch": 4.313105229180116, + "grad_norm": 1.921209937164633, + "learning_rate": 8.538349122139107e-05, + "loss": 0.9888, + "step": 6681 + }, + { + "epoch": 4.31375080697224, + "grad_norm": 1.848451993356454, + "learning_rate": 8.532519264212426e-05, + "loss": 1.0733, + "step": 6682 + }, + { + "epoch": 4.314396384764364, + "grad_norm": 1.9176858483042425, + "learning_rate": 8.526690606070574e-05, + "loss": 1.278, + "step": 6683 + }, + { + "epoch": 4.315041962556488, + "grad_norm": 1.747746897808525, + "learning_rate": 
8.520863148794812e-05, + "loss": 1.1252, + "step": 6684 + }, + { + "epoch": 4.315687540348612, + "grad_norm": 1.5940157205485281, + "learning_rate": 8.515036893466213e-05, + "loss": 0.9234, + "step": 6685 + }, + { + "epoch": 4.316333118140736, + "grad_norm": 2.8316007029297823, + "learning_rate": 8.509211841165597e-05, + "loss": 1.0213, + "step": 6686 + }, + { + "epoch": 4.3169786959328595, + "grad_norm": 1.7172041518321655, + "learning_rate": 8.503387992973576e-05, + "loss": 0.9731, + "step": 6687 + }, + { + "epoch": 4.3176242737249835, + "grad_norm": 1.8146119506471654, + "learning_rate": 8.497565349970534e-05, + "loss": 0.9994, + "step": 6688 + }, + { + "epoch": 4.318269851517107, + "grad_norm": 1.771605443164049, + "learning_rate": 8.491743913236628e-05, + "loss": 1.0607, + "step": 6689 + }, + { + "epoch": 4.318915429309232, + "grad_norm": 1.7913589551618194, + "learning_rate": 8.485923683851799e-05, + "loss": 1.045, + "step": 6690 + }, + { + "epoch": 4.319561007101356, + "grad_norm": 1.4523855337485767, + "learning_rate": 8.480104662895758e-05, + "loss": 0.8431, + "step": 6691 + }, + { + "epoch": 4.32020658489348, + "grad_norm": 1.7451679985306732, + "learning_rate": 8.474286851447996e-05, + "loss": 1.1088, + "step": 6692 + }, + { + "epoch": 4.320852162685604, + "grad_norm": 1.7382011248087532, + "learning_rate": 8.468470250587776e-05, + "loss": 1.1438, + "step": 6693 + }, + { + "epoch": 4.321497740477728, + "grad_norm": 1.8915741223204325, + "learning_rate": 8.462654861394134e-05, + "loss": 1.0047, + "step": 6694 + }, + { + "epoch": 4.322143318269852, + "grad_norm": 1.7605784450765902, + "learning_rate": 8.456840684945886e-05, + "loss": 1.0753, + "step": 6695 + }, + { + "epoch": 4.322788896061976, + "grad_norm": 1.5271778132043239, + "learning_rate": 8.451027722321636e-05, + "loss": 1.0047, + "step": 6696 + }, + { + "epoch": 4.3234344738541, + "grad_norm": 1.8763536056912444, + "learning_rate": 8.445215974599732e-05, + "loss": 1.3193, + "step": 6697 + }, + { + "epoch": 4.324080051646224, + "grad_norm": 1.7493020668586812, + "learning_rate": 8.439405442858312e-05, + "loss": 1.0146, + "step": 6698 + }, + { + "epoch": 4.324725629438348, + "grad_norm": 2.0532044481931644, + "learning_rate": 8.433596128175313e-05, + "loss": 1.3721, + "step": 6699 + }, + { + "epoch": 4.3253712072304715, + "grad_norm": 1.944835184790859, + "learning_rate": 8.427788031628399e-05, + "loss": 1.3694, + "step": 6700 + }, + { + "epoch": 4.3260167850225955, + "grad_norm": 2.0623128770776797, + "learning_rate": 8.421981154295033e-05, + "loss": 1.1187, + "step": 6701 + }, + { + "epoch": 4.326662362814719, + "grad_norm": 1.686418123327451, + "learning_rate": 8.416175497252472e-05, + "loss": 1.0583, + "step": 6702 + }, + { + "epoch": 4.327307940606843, + "grad_norm": 1.659076757747814, + "learning_rate": 8.410371061577707e-05, + "loss": 1.0743, + "step": 6703 + }, + { + "epoch": 4.327953518398967, + "grad_norm": 1.5413220976820028, + "learning_rate": 8.40456784834752e-05, + "loss": 0.8979, + "step": 6704 + }, + { + "epoch": 4.328599096191091, + "grad_norm": 1.6721801232874791, + "learning_rate": 8.398765858638484e-05, + "loss": 1.1646, + "step": 6705 + }, + { + "epoch": 4.329244673983215, + "grad_norm": 1.705574430881442, + "learning_rate": 8.39296509352691e-05, + "loss": 0.9136, + "step": 6706 + }, + { + "epoch": 4.329890251775339, + "grad_norm": 1.6049986319217078, + "learning_rate": 8.38716555408891e-05, + "loss": 1.0685, + "step": 6707 + }, + { + "epoch": 4.330535829567463, + "grad_norm": 2.004666629662238, + 
"learning_rate": 8.381367241400365e-05, + "loss": 1.1487, + "step": 6708 + }, + { + "epoch": 4.331181407359587, + "grad_norm": 1.8637051249324421, + "learning_rate": 8.375570156536903e-05, + "loss": 1.2203, + "step": 6709 + }, + { + "epoch": 4.331826985151711, + "grad_norm": 1.642371232394214, + "learning_rate": 8.36977430057396e-05, + "loss": 1.0144, + "step": 6710 + }, + { + "epoch": 4.332472562943835, + "grad_norm": 1.4902225768635025, + "learning_rate": 8.363979674586722e-05, + "loss": 1.0515, + "step": 6711 + }, + { + "epoch": 4.333118140735959, + "grad_norm": 1.8340748172510672, + "learning_rate": 8.358186279650152e-05, + "loss": 1.1399, + "step": 6712 + }, + { + "epoch": 4.333763718528083, + "grad_norm": 1.9105039421308418, + "learning_rate": 8.352394116838985e-05, + "loss": 1.1259, + "step": 6713 + }, + { + "epoch": 4.3344092963202066, + "grad_norm": 1.5994453895846827, + "learning_rate": 8.346603187227728e-05, + "loss": 1.0334, + "step": 6714 + }, + { + "epoch": 4.3350548741123305, + "grad_norm": 1.8641089593558964, + "learning_rate": 8.340813491890658e-05, + "loss": 1.1859, + "step": 6715 + }, + { + "epoch": 4.335700451904454, + "grad_norm": 1.5692699539913326, + "learning_rate": 8.335025031901825e-05, + "loss": 0.9146, + "step": 6716 + }, + { + "epoch": 4.336346029696578, + "grad_norm": 1.4970440587031202, + "learning_rate": 8.329237808335045e-05, + "loss": 0.8929, + "step": 6717 + }, + { + "epoch": 4.336991607488702, + "grad_norm": 1.648084446682759, + "learning_rate": 8.323451822263911e-05, + "loss": 1.0564, + "step": 6718 + }, + { + "epoch": 4.337637185280826, + "grad_norm": 1.6047880128266043, + "learning_rate": 8.317667074761784e-05, + "loss": 1.1039, + "step": 6719 + }, + { + "epoch": 4.33828276307295, + "grad_norm": 1.7064270257505076, + "learning_rate": 8.311883566901787e-05, + "loss": 1.0516, + "step": 6720 + }, + { + "epoch": 4.338928340865074, + "grad_norm": 2.0728370135590315, + "learning_rate": 8.30610129975684e-05, + "loss": 1.3048, + "step": 6721 + }, + { + "epoch": 4.339573918657198, + "grad_norm": 1.527896906275674, + "learning_rate": 8.300320274399594e-05, + "loss": 1.0327, + "step": 6722 + }, + { + "epoch": 4.340219496449322, + "grad_norm": 2.0006368408459543, + "learning_rate": 8.294540491902489e-05, + "loss": 1.0776, + "step": 6723 + }, + { + "epoch": 4.340865074241446, + "grad_norm": 1.6483257106725928, + "learning_rate": 8.288761953337756e-05, + "loss": 1.0426, + "step": 6724 + }, + { + "epoch": 4.34151065203357, + "grad_norm": 1.8013572290198805, + "learning_rate": 8.282984659777351e-05, + "loss": 1.174, + "step": 6725 + }, + { + "epoch": 4.342156229825694, + "grad_norm": 1.9026006924378087, + "learning_rate": 8.277208612293027e-05, + "loss": 1.0129, + "step": 6726 + }, + { + "epoch": 4.342801807617818, + "grad_norm": 1.7317862896550509, + "learning_rate": 8.271433811956314e-05, + "loss": 1.0114, + "step": 6727 + }, + { + "epoch": 4.343447385409942, + "grad_norm": 2.0203774042105556, + "learning_rate": 8.265660259838484e-05, + "loss": 1.1783, + "step": 6728 + }, + { + "epoch": 4.3440929632020655, + "grad_norm": 1.5982284975640104, + "learning_rate": 8.259887957010586e-05, + "loss": 0.9535, + "step": 6729 + }, + { + "epoch": 4.344738540994189, + "grad_norm": 1.907971422923918, + "learning_rate": 8.254116904543465e-05, + "loss": 0.9924, + "step": 6730 + }, + { + "epoch": 4.345384118786313, + "grad_norm": 1.6255616215702864, + "learning_rate": 8.248347103507683e-05, + "loss": 1.022, + "step": 6731 + }, + { + "epoch": 4.346029696578437, + "grad_norm": 
1.7130692034396269, + "learning_rate": 8.24257855497362e-05, + "loss": 1.1229, + "step": 6732 + }, + { + "epoch": 4.346675274370561, + "grad_norm": 1.8100460892242214, + "learning_rate": 8.236811260011399e-05, + "loss": 1.1003, + "step": 6733 + }, + { + "epoch": 4.347320852162685, + "grad_norm": 1.6649718694947193, + "learning_rate": 8.231045219690894e-05, + "loss": 1.0204, + "step": 6734 + }, + { + "epoch": 4.347966429954809, + "grad_norm": 1.6349443088206914, + "learning_rate": 8.225280435081786e-05, + "loss": 1.0884, + "step": 6735 + }, + { + "epoch": 4.348612007746934, + "grad_norm": 2.3002544124501134, + "learning_rate": 8.219516907253495e-05, + "loss": 1.0708, + "step": 6736 + }, + { + "epoch": 4.349257585539058, + "grad_norm": 1.7832733002034922, + "learning_rate": 8.213754637275217e-05, + "loss": 1.1126, + "step": 6737 + }, + { + "epoch": 4.349903163331182, + "grad_norm": 1.947535371568457, + "learning_rate": 8.20799362621591e-05, + "loss": 1.1886, + "step": 6738 + }, + { + "epoch": 4.350548741123306, + "grad_norm": 1.6096896226012656, + "learning_rate": 8.202233875144303e-05, + "loss": 0.9801, + "step": 6739 + }, + { + "epoch": 4.35119431891543, + "grad_norm": 1.6030102609685137, + "learning_rate": 8.196475385128889e-05, + "loss": 0.9303, + "step": 6740 + }, + { + "epoch": 4.351839896707554, + "grad_norm": 1.7002507819405717, + "learning_rate": 8.19071815723793e-05, + "loss": 1.0456, + "step": 6741 + }, + { + "epoch": 4.3524854744996775, + "grad_norm": 1.8030409870940882, + "learning_rate": 8.184962192539448e-05, + "loss": 1.0032, + "step": 6742 + }, + { + "epoch": 4.353131052291801, + "grad_norm": 1.7727467494186844, + "learning_rate": 8.179207492101235e-05, + "loss": 1.1316, + "step": 6743 + }, + { + "epoch": 4.353776630083925, + "grad_norm": 1.7685296168705853, + "learning_rate": 8.173454056990851e-05, + "loss": 1.0543, + "step": 6744 + }, + { + "epoch": 4.354422207876049, + "grad_norm": 1.6220060829509957, + "learning_rate": 8.16770188827561e-05, + "loss": 1.0321, + "step": 6745 + }, + { + "epoch": 4.355067785668173, + "grad_norm": 1.9082493296474377, + "learning_rate": 8.161950987022615e-05, + "loss": 1.2373, + "step": 6746 + }, + { + "epoch": 4.355713363460297, + "grad_norm": 1.6690712932543135, + "learning_rate": 8.156201354298702e-05, + "loss": 1.0439, + "step": 6747 + }, + { + "epoch": 4.356358941252421, + "grad_norm": 1.776387461745141, + "learning_rate": 8.150452991170485e-05, + "loss": 1.0657, + "step": 6748 + }, + { + "epoch": 4.357004519044545, + "grad_norm": 1.9199331753551363, + "learning_rate": 8.144705898704368e-05, + "loss": 1.1606, + "step": 6749 + }, + { + "epoch": 4.357650096836669, + "grad_norm": 1.6601616354633784, + "learning_rate": 8.138960077966473e-05, + "loss": 1.0187, + "step": 6750 + }, + { + "epoch": 4.358295674628793, + "grad_norm": 1.7989068627150704, + "learning_rate": 8.133215530022713e-05, + "loss": 1.044, + "step": 6751 + }, + { + "epoch": 4.358941252420917, + "grad_norm": 1.781120724661769, + "learning_rate": 8.127472255938778e-05, + "loss": 1.0647, + "step": 6752 + }, + { + "epoch": 4.359586830213041, + "grad_norm": 2.1072214679694707, + "learning_rate": 8.121730256780082e-05, + "loss": 1.1639, + "step": 6753 + }, + { + "epoch": 4.360232408005165, + "grad_norm": 1.6391640659012434, + "learning_rate": 8.115989533611842e-05, + "loss": 1.0476, + "step": 6754 + }, + { + "epoch": 4.360877985797289, + "grad_norm": 1.8519471575338042, + "learning_rate": 8.110250087499022e-05, + "loss": 1.0146, + "step": 6755 + }, + { + "epoch": 4.3615235635894125, 
+ "grad_norm": 1.7352594864664734, + "learning_rate": 8.104511919506332e-05, + "loss": 1.1732, + "step": 6756 + }, + { + "epoch": 4.362169141381536, + "grad_norm": 1.8045669028001101, + "learning_rate": 8.098775030698275e-05, + "loss": 1.1904, + "step": 6757 + }, + { + "epoch": 4.36281471917366, + "grad_norm": 1.943764614293319, + "learning_rate": 8.09303942213911e-05, + "loss": 1.2681, + "step": 6758 + }, + { + "epoch": 4.363460296965784, + "grad_norm": 1.8029942275313169, + "learning_rate": 8.087305094892834e-05, + "loss": 1.1944, + "step": 6759 + }, + { + "epoch": 4.364105874757908, + "grad_norm": 1.6761490020527183, + "learning_rate": 8.081572050023236e-05, + "loss": 1.0406, + "step": 6760 + }, + { + "epoch": 4.364751452550032, + "grad_norm": 1.7007129125411753, + "learning_rate": 8.075840288593852e-05, + "loss": 1.0327, + "step": 6761 + }, + { + "epoch": 4.365397030342156, + "grad_norm": 1.5890129607481611, + "learning_rate": 8.070109811667987e-05, + "loss": 0.9963, + "step": 6762 + }, + { + "epoch": 4.36604260813428, + "grad_norm": 1.902633424072677, + "learning_rate": 8.0643806203087e-05, + "loss": 1.1574, + "step": 6763 + }, + { + "epoch": 4.366688185926404, + "grad_norm": 1.698742994954233, + "learning_rate": 8.058652715578817e-05, + "loss": 0.9295, + "step": 6764 + }, + { + "epoch": 4.367333763718528, + "grad_norm": 1.5477414764098536, + "learning_rate": 8.052926098540923e-05, + "loss": 1.004, + "step": 6765 + }, + { + "epoch": 4.367979341510652, + "grad_norm": 1.565439606608795, + "learning_rate": 8.047200770257363e-05, + "loss": 0.9783, + "step": 6766 + }, + { + "epoch": 4.368624919302776, + "grad_norm": 1.694814937026095, + "learning_rate": 8.041476731790249e-05, + "loss": 1.0987, + "step": 6767 + }, + { + "epoch": 4.3692704970949, + "grad_norm": 1.8080300474574889, + "learning_rate": 8.035753984201448e-05, + "loss": 0.952, + "step": 6768 + }, + { + "epoch": 4.369916074887024, + "grad_norm": 1.8284261314454477, + "learning_rate": 8.030032528552589e-05, + "loss": 1.1235, + "step": 6769 + }, + { + "epoch": 4.3705616526791475, + "grad_norm": 1.9861305274635872, + "learning_rate": 8.024312365905054e-05, + "loss": 1.057, + "step": 6770 + }, + { + "epoch": 4.3712072304712715, + "grad_norm": 1.7575267917038289, + "learning_rate": 8.018593497320012e-05, + "loss": 1.1609, + "step": 6771 + }, + { + "epoch": 4.371852808263395, + "grad_norm": 1.8653720628009052, + "learning_rate": 8.012875923858353e-05, + "loss": 1.193, + "step": 6772 + }, + { + "epoch": 4.372498386055519, + "grad_norm": 1.7149683696922542, + "learning_rate": 8.007159646580749e-05, + "loss": 1.0997, + "step": 6773 + }, + { + "epoch": 4.373143963847643, + "grad_norm": 1.6608220442422659, + "learning_rate": 8.001444666547644e-05, + "loss": 1.0074, + "step": 6774 + }, + { + "epoch": 4.373789541639767, + "grad_norm": 1.7289385250101081, + "learning_rate": 7.995730984819211e-05, + "loss": 1.098, + "step": 6775 + }, + { + "epoch": 4.374435119431892, + "grad_norm": 1.5749692099727353, + "learning_rate": 7.990018602455395e-05, + "loss": 0.9478, + "step": 6776 + }, + { + "epoch": 4.375080697224016, + "grad_norm": 1.5898878577004316, + "learning_rate": 7.984307520515921e-05, + "loss": 0.9692, + "step": 6777 + }, + { + "epoch": 4.37572627501614, + "grad_norm": 1.946263815047212, + "learning_rate": 7.97859774006023e-05, + "loss": 1.261, + "step": 6778 + }, + { + "epoch": 4.376371852808264, + "grad_norm": 1.5563717480953345, + "learning_rate": 7.972889262147563e-05, + "loss": 0.8951, + "step": 6779 + }, + { + "epoch": 
4.377017430600388, + "grad_norm": 1.4177302264234461, + "learning_rate": 7.967182087836903e-05, + "loss": 0.8326, + "step": 6780 + }, + { + "epoch": 4.377663008392512, + "grad_norm": 1.6579412522051298, + "learning_rate": 7.96147621818697e-05, + "loss": 0.9626, + "step": 6781 + }, + { + "epoch": 4.378308586184636, + "grad_norm": 1.6745859052470298, + "learning_rate": 7.955771654256282e-05, + "loss": 0.9262, + "step": 6782 + }, + { + "epoch": 4.3789541639767595, + "grad_norm": 1.6295124110068178, + "learning_rate": 7.950068397103094e-05, + "loss": 1.0008, + "step": 6783 + }, + { + "epoch": 4.3795997417688834, + "grad_norm": 1.5290519259512574, + "learning_rate": 7.944366447785402e-05, + "loss": 0.8226, + "step": 6784 + }, + { + "epoch": 4.380245319561007, + "grad_norm": 1.7423336776104967, + "learning_rate": 7.938665807360997e-05, + "loss": 1.0708, + "step": 6785 + }, + { + "epoch": 4.380890897353131, + "grad_norm": 1.7406105156202356, + "learning_rate": 7.932966476887395e-05, + "loss": 1.0022, + "step": 6786 + }, + { + "epoch": 4.381536475145255, + "grad_norm": 1.6916627510664057, + "learning_rate": 7.927268457421888e-05, + "loss": 0.9864, + "step": 6787 + }, + { + "epoch": 4.382182052937379, + "grad_norm": 1.8918312244444908, + "learning_rate": 7.921571750021512e-05, + "loss": 1.0681, + "step": 6788 + }, + { + "epoch": 4.382827630729503, + "grad_norm": 1.6827072475342477, + "learning_rate": 7.915876355743067e-05, + "loss": 1.0074, + "step": 6789 + }, + { + "epoch": 4.383473208521627, + "grad_norm": 2.2839777777448758, + "learning_rate": 7.91018227564311e-05, + "loss": 1.0619, + "step": 6790 + }, + { + "epoch": 4.384118786313751, + "grad_norm": 1.9354649907891601, + "learning_rate": 7.904489510777951e-05, + "loss": 1.166, + "step": 6791 + }, + { + "epoch": 4.384764364105875, + "grad_norm": 1.8022247312427147, + "learning_rate": 7.898798062203654e-05, + "loss": 1.2218, + "step": 6792 + }, + { + "epoch": 4.385409941897999, + "grad_norm": 1.914457039628721, + "learning_rate": 7.893107930976045e-05, + "loss": 1.1239, + "step": 6793 + }, + { + "epoch": 4.386055519690123, + "grad_norm": 1.8981213390216314, + "learning_rate": 7.887419118150702e-05, + "loss": 1.2454, + "step": 6794 + }, + { + "epoch": 4.386701097482247, + "grad_norm": 1.75066693034147, + "learning_rate": 7.881731624782955e-05, + "loss": 0.9506, + "step": 6795 + }, + { + "epoch": 4.387346675274371, + "grad_norm": 1.5382008527196496, + "learning_rate": 7.876045451927904e-05, + "loss": 0.9185, + "step": 6796 + }, + { + "epoch": 4.3879922530664945, + "grad_norm": 1.4624328915827158, + "learning_rate": 7.870360600640381e-05, + "loss": 0.8998, + "step": 6797 + }, + { + "epoch": 4.3886378308586185, + "grad_norm": 1.6855366080224947, + "learning_rate": 7.864677071974985e-05, + "loss": 0.9991, + "step": 6798 + }, + { + "epoch": 4.389283408650742, + "grad_norm": 1.9362695634506641, + "learning_rate": 7.858994866986086e-05, + "loss": 1.1647, + "step": 6799 + }, + { + "epoch": 4.389928986442866, + "grad_norm": 1.7218338034813632, + "learning_rate": 7.853313986727771e-05, + "loss": 1.0713, + "step": 6800 + }, + { + "epoch": 4.39057456423499, + "grad_norm": 1.8795341691484402, + "learning_rate": 7.84763443225391e-05, + "loss": 1.1031, + "step": 6801 + }, + { + "epoch": 4.391220142027114, + "grad_norm": 1.679815989835826, + "learning_rate": 7.84195620461813e-05, + "loss": 1.1149, + "step": 6802 + }, + { + "epoch": 4.391865719819238, + "grad_norm": 1.6592425952969114, + "learning_rate": 7.836279304873781e-05, + "loss": 1.1004, + "step": 6803 + }, 
+ { + "epoch": 4.392511297611362, + "grad_norm": 1.6364406301783423, + "learning_rate": 7.830603734074003e-05, + "loss": 1.0247, + "step": 6804 + }, + { + "epoch": 4.393156875403486, + "grad_norm": 1.7094376649877652, + "learning_rate": 7.824929493271675e-05, + "loss": 1.0505, + "step": 6805 + }, + { + "epoch": 4.39380245319561, + "grad_norm": 1.6417468289432393, + "learning_rate": 7.819256583519407e-05, + "loss": 0.9385, + "step": 6806 + }, + { + "epoch": 4.394448030987734, + "grad_norm": 1.8250787594993307, + "learning_rate": 7.813585005869604e-05, + "loss": 1.0742, + "step": 6807 + }, + { + "epoch": 4.395093608779858, + "grad_norm": 1.5457509857399627, + "learning_rate": 7.807914761374399e-05, + "loss": 0.9272, + "step": 6808 + }, + { + "epoch": 4.395739186571982, + "grad_norm": 1.7442508744159266, + "learning_rate": 7.802245851085666e-05, + "loss": 1.1769, + "step": 6809 + }, + { + "epoch": 4.396384764364106, + "grad_norm": 1.7199576142954351, + "learning_rate": 7.796578276055065e-05, + "loss": 0.9747, + "step": 6810 + }, + { + "epoch": 4.39703034215623, + "grad_norm": 1.7899458966064457, + "learning_rate": 7.790912037333982e-05, + "loss": 1.2131, + "step": 6811 + }, + { + "epoch": 4.3976759199483535, + "grad_norm": 1.65423553539896, + "learning_rate": 7.785247135973564e-05, + "loss": 1.0945, + "step": 6812 + }, + { + "epoch": 4.398321497740477, + "grad_norm": 1.4944652389073203, + "learning_rate": 7.779583573024709e-05, + "loss": 0.8809, + "step": 6813 + }, + { + "epoch": 4.398967075532601, + "grad_norm": 1.697947858953845, + "learning_rate": 7.773921349538066e-05, + "loss": 1.0546, + "step": 6814 + }, + { + "epoch": 4.399612653324725, + "grad_norm": 1.7906439993045111, + "learning_rate": 7.768260466564037e-05, + "loss": 1.1529, + "step": 6815 + }, + { + "epoch": 4.400258231116849, + "grad_norm": 1.8948460506310312, + "learning_rate": 7.762600925152776e-05, + "loss": 1.155, + "step": 6816 + }, + { + "epoch": 4.400903808908973, + "grad_norm": 1.899182707452482, + "learning_rate": 7.756942726354182e-05, + "loss": 1.258, + "step": 6817 + }, + { + "epoch": 4.401549386701097, + "grad_norm": 1.5154893510076175, + "learning_rate": 7.751285871217916e-05, + "loss": 0.9568, + "step": 6818 + }, + { + "epoch": 4.402194964493221, + "grad_norm": 1.8522519497559136, + "learning_rate": 7.74563036079338e-05, + "loss": 1.0684, + "step": 6819 + }, + { + "epoch": 4.402840542285345, + "grad_norm": 2.120426793125909, + "learning_rate": 7.739976196129721e-05, + "loss": 1.0978, + "step": 6820 + }, + { + "epoch": 4.403486120077469, + "grad_norm": 1.9145345891366703, + "learning_rate": 7.734323378275868e-05, + "loss": 1.169, + "step": 6821 + }, + { + "epoch": 4.404131697869594, + "grad_norm": 1.8135594408825797, + "learning_rate": 7.728671908280458e-05, + "loss": 1.2102, + "step": 6822 + }, + { + "epoch": 4.404777275661718, + "grad_norm": 1.6822504905141915, + "learning_rate": 7.723021787191897e-05, + "loss": 1.0292, + "step": 6823 + }, + { + "epoch": 4.4054228534538415, + "grad_norm": 1.5463181183996522, + "learning_rate": 7.717373016058361e-05, + "loss": 1.0249, + "step": 6824 + }, + { + "epoch": 4.4060684312459655, + "grad_norm": 1.6017270247204214, + "learning_rate": 7.711725595927732e-05, + "loss": 0.9548, + "step": 6825 + }, + { + "epoch": 4.406714009038089, + "grad_norm": 2.018104104639786, + "learning_rate": 7.706079527847671e-05, + "loss": 1.1167, + "step": 6826 + }, + { + "epoch": 4.407359586830213, + "grad_norm": 1.8356529531910595, + "learning_rate": 7.700434812865599e-05, + "loss": 1.1207, + 
"step": 6827 + }, + { + "epoch": 4.408005164622337, + "grad_norm": 1.7354932667404246, + "learning_rate": 7.694791452028642e-05, + "loss": 1.138, + "step": 6828 + }, + { + "epoch": 4.408650742414461, + "grad_norm": 1.6731296281172474, + "learning_rate": 7.689149446383725e-05, + "loss": 0.9295, + "step": 6829 + }, + { + "epoch": 4.409296320206585, + "grad_norm": 1.6142567704731468, + "learning_rate": 7.683508796977496e-05, + "loss": 1.0051, + "step": 6830 + }, + { + "epoch": 4.409941897998709, + "grad_norm": 1.7562739624538442, + "learning_rate": 7.677869504856336e-05, + "loss": 1.1248, + "step": 6831 + }, + { + "epoch": 4.410587475790833, + "grad_norm": 1.7357924110442673, + "learning_rate": 7.672231571066407e-05, + "loss": 1.0008, + "step": 6832 + }, + { + "epoch": 4.411233053582957, + "grad_norm": 1.9412723512715466, + "learning_rate": 7.666594996653611e-05, + "loss": 1.1597, + "step": 6833 + }, + { + "epoch": 4.411878631375081, + "grad_norm": 1.6319108498785013, + "learning_rate": 7.66095978266357e-05, + "loss": 1.0143, + "step": 6834 + }, + { + "epoch": 4.412524209167205, + "grad_norm": 1.4166211205224222, + "learning_rate": 7.655325930141691e-05, + "loss": 0.8812, + "step": 6835 + }, + { + "epoch": 4.413169786959329, + "grad_norm": 2.0969075053522026, + "learning_rate": 7.649693440133108e-05, + "loss": 1.2316, + "step": 6836 + }, + { + "epoch": 4.413815364751453, + "grad_norm": 1.8320946216859166, + "learning_rate": 7.644062313682704e-05, + "loss": 1.1358, + "step": 6837 + }, + { + "epoch": 4.414460942543577, + "grad_norm": 1.826565640730574, + "learning_rate": 7.638432551835116e-05, + "loss": 1.0557, + "step": 6838 + }, + { + "epoch": 4.4151065203357005, + "grad_norm": 1.8287333808558053, + "learning_rate": 7.63280415563472e-05, + "loss": 1.0544, + "step": 6839 + }, + { + "epoch": 4.415752098127824, + "grad_norm": 1.626427432161819, + "learning_rate": 7.627177126125641e-05, + "loss": 1.0249, + "step": 6840 + }, + { + "epoch": 4.416397675919948, + "grad_norm": 1.6215761977098413, + "learning_rate": 7.621551464351754e-05, + "loss": 0.9073, + "step": 6841 + }, + { + "epoch": 4.417043253712072, + "grad_norm": 1.7888465973649637, + "learning_rate": 7.615927171356677e-05, + "loss": 1.0105, + "step": 6842 + }, + { + "epoch": 4.417688831504196, + "grad_norm": 1.9304070698048388, + "learning_rate": 7.610304248183774e-05, + "loss": 1.0079, + "step": 6843 + }, + { + "epoch": 4.41833440929632, + "grad_norm": 1.517373713507757, + "learning_rate": 7.604682695876156e-05, + "loss": 0.8385, + "step": 6844 + }, + { + "epoch": 4.418979987088444, + "grad_norm": 1.6323378785037856, + "learning_rate": 7.599062515476674e-05, + "loss": 1.0742, + "step": 6845 + }, + { + "epoch": 4.419625564880568, + "grad_norm": 1.6957468281062404, + "learning_rate": 7.593443708027945e-05, + "loss": 1.1844, + "step": 6846 + }, + { + "epoch": 4.420271142672692, + "grad_norm": 1.7449719911590007, + "learning_rate": 7.587826274572303e-05, + "loss": 0.9915, + "step": 6847 + }, + { + "epoch": 4.420916720464816, + "grad_norm": 2.09098679025348, + "learning_rate": 7.582210216151836e-05, + "loss": 1.1347, + "step": 6848 + }, + { + "epoch": 4.42156229825694, + "grad_norm": 1.5555167756721189, + "learning_rate": 7.576595533808398e-05, + "loss": 0.9593, + "step": 6849 + }, + { + "epoch": 4.422207876049064, + "grad_norm": 1.7803095160227187, + "learning_rate": 7.57098222858356e-05, + "loss": 1.1312, + "step": 6850 + }, + { + "epoch": 4.422853453841188, + "grad_norm": 1.762758363455301, + "learning_rate": 7.56537030151864e-05, + "loss": 
1.1533, + "step": 6851 + }, + { + "epoch": 4.423499031633312, + "grad_norm": 1.5449197594800688, + "learning_rate": 7.559759753654733e-05, + "loss": 0.9669, + "step": 6852 + }, + { + "epoch": 4.4241446094254355, + "grad_norm": 1.7263399362229832, + "learning_rate": 7.554150586032627e-05, + "loss": 0.8807, + "step": 6853 + }, + { + "epoch": 4.4247901872175595, + "grad_norm": 1.8113285640405465, + "learning_rate": 7.548542799692898e-05, + "loss": 1.166, + "step": 6854 + }, + { + "epoch": 4.425435765009683, + "grad_norm": 1.9744141734153786, + "learning_rate": 7.542936395675852e-05, + "loss": 1.2122, + "step": 6855 + }, + { + "epoch": 4.426081342801807, + "grad_norm": 1.8723981564377232, + "learning_rate": 7.537331375021515e-05, + "loss": 1.1164, + "step": 6856 + }, + { + "epoch": 4.426726920593931, + "grad_norm": 3.4701046653955325, + "learning_rate": 7.531727738769692e-05, + "loss": 0.8653, + "step": 6857 + }, + { + "epoch": 4.427372498386055, + "grad_norm": 1.6485500197201939, + "learning_rate": 7.52612548795992e-05, + "loss": 1.1144, + "step": 6858 + }, + { + "epoch": 4.428018076178179, + "grad_norm": 2.35969385724169, + "learning_rate": 7.520524623631454e-05, + "loss": 0.9648, + "step": 6859 + }, + { + "epoch": 4.428663653970303, + "grad_norm": 1.8207125381855673, + "learning_rate": 7.514925146823334e-05, + "loss": 0.9807, + "step": 6860 + }, + { + "epoch": 4.429309231762427, + "grad_norm": 1.6388927471521617, + "learning_rate": 7.50932705857431e-05, + "loss": 0.9034, + "step": 6861 + }, + { + "epoch": 4.429954809554552, + "grad_norm": 2.1709177790849963, + "learning_rate": 7.50373035992289e-05, + "loss": 1.0018, + "step": 6862 + }, + { + "epoch": 4.430600387346676, + "grad_norm": 1.7009144101505678, + "learning_rate": 7.498135051907315e-05, + "loss": 1.0203, + "step": 6863 + }, + { + "epoch": 4.4312459651388, + "grad_norm": 1.8296485640066993, + "learning_rate": 7.492541135565577e-05, + "loss": 1.176, + "step": 6864 + }, + { + "epoch": 4.431891542930924, + "grad_norm": 1.7137455896110936, + "learning_rate": 7.486948611935404e-05, + "loss": 1.1236, + "step": 6865 + }, + { + "epoch": 4.4325371207230475, + "grad_norm": 1.7469830195441487, + "learning_rate": 7.481357482054266e-05, + "loss": 1.2289, + "step": 6866 + }, + { + "epoch": 4.433182698515171, + "grad_norm": 1.5974839075057583, + "learning_rate": 7.475767746959379e-05, + "loss": 0.9683, + "step": 6867 + }, + { + "epoch": 4.433828276307295, + "grad_norm": 1.649157599370116, + "learning_rate": 7.470179407687694e-05, + "loss": 0.9958, + "step": 6868 + }, + { + "epoch": 4.434473854099419, + "grad_norm": 1.844604608036503, + "learning_rate": 7.464592465275906e-05, + "loss": 0.9653, + "step": 6869 + }, + { + "epoch": 4.435119431891543, + "grad_norm": 1.5949282953518542, + "learning_rate": 7.459006920760447e-05, + "loss": 0.8949, + "step": 6870 + }, + { + "epoch": 4.435765009683667, + "grad_norm": 1.6905062688602295, + "learning_rate": 7.453422775177508e-05, + "loss": 1.0168, + "step": 6871 + }, + { + "epoch": 4.436410587475791, + "grad_norm": 1.894991058375784, + "learning_rate": 7.447840029562991e-05, + "loss": 1.1233, + "step": 6872 + }, + { + "epoch": 4.437056165267915, + "grad_norm": 1.9541477773375775, + "learning_rate": 7.442258684952553e-05, + "loss": 1.0948, + "step": 6873 + }, + { + "epoch": 4.437701743060039, + "grad_norm": 1.7373965166159182, + "learning_rate": 7.436678742381606e-05, + "loss": 1.1583, + "step": 6874 + }, + { + "epoch": 4.438347320852163, + "grad_norm": 1.728259766996563, + "learning_rate": 
7.431100202885277e-05, + "loss": 0.9583, + "step": 6875 + }, + { + "epoch": 4.438992898644287, + "grad_norm": 1.6285957560967779, + "learning_rate": 7.425523067498434e-05, + "loss": 0.9733, + "step": 6876 + }, + { + "epoch": 4.439638476436411, + "grad_norm": 1.877587449328927, + "learning_rate": 7.419947337255716e-05, + "loss": 1.1087, + "step": 6877 + }, + { + "epoch": 4.440284054228535, + "grad_norm": 1.6385508540721645, + "learning_rate": 7.414373013191453e-05, + "loss": 1.0051, + "step": 6878 + }, + { + "epoch": 4.440929632020659, + "grad_norm": 1.7636888504480224, + "learning_rate": 7.408800096339763e-05, + "loss": 1.0711, + "step": 6879 + }, + { + "epoch": 4.4415752098127825, + "grad_norm": 1.771192007849736, + "learning_rate": 7.403228587734473e-05, + "loss": 1.1736, + "step": 6880 + }, + { + "epoch": 4.4422207876049065, + "grad_norm": 1.7362237799114417, + "learning_rate": 7.397658488409142e-05, + "loss": 1.0098, + "step": 6881 + }, + { + "epoch": 4.44286636539703, + "grad_norm": 2.029151876377515, + "learning_rate": 7.392089799397098e-05, + "loss": 1.2893, + "step": 6882 + }, + { + "epoch": 4.443511943189154, + "grad_norm": 1.9488794307757307, + "learning_rate": 7.386522521731391e-05, + "loss": 1.2903, + "step": 6883 + }, + { + "epoch": 4.444157520981278, + "grad_norm": 1.8887112657483294, + "learning_rate": 7.38095665644479e-05, + "loss": 1.3487, + "step": 6884 + }, + { + "epoch": 4.444803098773402, + "grad_norm": 1.8211154606713735, + "learning_rate": 7.375392204569842e-05, + "loss": 1.0253, + "step": 6885 + }, + { + "epoch": 4.445448676565526, + "grad_norm": 1.8647698935326595, + "learning_rate": 7.369829167138798e-05, + "loss": 1.0992, + "step": 6886 + }, + { + "epoch": 4.44609425435765, + "grad_norm": 1.8026999855554287, + "learning_rate": 7.364267545183666e-05, + "loss": 1.0933, + "step": 6887 + }, + { + "epoch": 4.446739832149774, + "grad_norm": 1.8993510019999686, + "learning_rate": 7.35870733973618e-05, + "loss": 1.188, + "step": 6888 + }, + { + "epoch": 4.447385409941898, + "grad_norm": 1.9750951627230229, + "learning_rate": 7.35314855182782e-05, + "loss": 0.9414, + "step": 6889 + }, + { + "epoch": 4.448030987734022, + "grad_norm": 2.3228114895614196, + "learning_rate": 7.347591182489795e-05, + "loss": 1.2082, + "step": 6890 + }, + { + "epoch": 4.448676565526146, + "grad_norm": 1.7292875352428414, + "learning_rate": 7.342035232753055e-05, + "loss": 0.998, + "step": 6891 + }, + { + "epoch": 4.44932214331827, + "grad_norm": 1.5948819518302781, + "learning_rate": 7.336480703648289e-05, + "loss": 0.9542, + "step": 6892 + }, + { + "epoch": 4.449967721110394, + "grad_norm": 1.9095118401742923, + "learning_rate": 7.330927596205919e-05, + "loss": 0.9068, + "step": 6893 + }, + { + "epoch": 4.450613298902518, + "grad_norm": 1.6736210993121146, + "learning_rate": 7.325375911456103e-05, + "loss": 1.1084, + "step": 6894 + }, + { + "epoch": 4.4512588766946415, + "grad_norm": 1.9364317448251762, + "learning_rate": 7.31982565042873e-05, + "loss": 1.034, + "step": 6895 + }, + { + "epoch": 4.451904454486765, + "grad_norm": 1.615785097304066, + "learning_rate": 7.314276814153451e-05, + "loss": 0.941, + "step": 6896 + }, + { + "epoch": 4.452550032278889, + "grad_norm": 1.6241948703678188, + "learning_rate": 7.308729403659612e-05, + "loss": 0.9608, + "step": 6897 + }, + { + "epoch": 4.453195610071013, + "grad_norm": 1.5016864476052383, + "learning_rate": 7.303183419976318e-05, + "loss": 0.9055, + "step": 6898 + }, + { + "epoch": 4.453841187863137, + "grad_norm": 1.7467692517740077, + 
"learning_rate": 7.297638864132423e-05, + "loss": 1.0892, + "step": 6899 + }, + { + "epoch": 4.454486765655261, + "grad_norm": 1.8147618934261867, + "learning_rate": 7.292095737156481e-05, + "loss": 1.0428, + "step": 6900 + }, + { + "epoch": 4.455132343447385, + "grad_norm": 1.7411188393140171, + "learning_rate": 7.286554040076801e-05, + "loss": 1.0552, + "step": 6901 + }, + { + "epoch": 4.455777921239509, + "grad_norm": 1.9076056588412202, + "learning_rate": 7.281013773921443e-05, + "loss": 1.2563, + "step": 6902 + }, + { + "epoch": 4.456423499031633, + "grad_norm": 2.3982020364376195, + "learning_rate": 7.275474939718161e-05, + "loss": 1.2034, + "step": 6903 + }, + { + "epoch": 4.457069076823757, + "grad_norm": 1.9682209640824198, + "learning_rate": 7.269937538494482e-05, + "loss": 1.1588, + "step": 6904 + }, + { + "epoch": 4.457714654615881, + "grad_norm": 2.0081287227962634, + "learning_rate": 7.264401571277652e-05, + "loss": 1.2564, + "step": 6905 + }, + { + "epoch": 4.458360232408005, + "grad_norm": 1.8812767893721394, + "learning_rate": 7.258867039094632e-05, + "loss": 1.0475, + "step": 6906 + }, + { + "epoch": 4.459005810200129, + "grad_norm": 1.8190091499384162, + "learning_rate": 7.253333942972154e-05, + "loss": 1.0593, + "step": 6907 + }, + { + "epoch": 4.4596513879922535, + "grad_norm": 1.8566858298482178, + "learning_rate": 7.247802283936668e-05, + "loss": 1.175, + "step": 6908 + }, + { + "epoch": 4.460296965784377, + "grad_norm": 1.7987230924232558, + "learning_rate": 7.24227206301433e-05, + "loss": 1.167, + "step": 6909 + }, + { + "epoch": 4.460942543576501, + "grad_norm": 1.7812320702603341, + "learning_rate": 7.236743281231075e-05, + "loss": 1.0522, + "step": 6910 + }, + { + "epoch": 4.461588121368625, + "grad_norm": 1.728972240899601, + "learning_rate": 7.231215939612543e-05, + "loss": 1.0388, + "step": 6911 + }, + { + "epoch": 4.462233699160749, + "grad_norm": 1.9889380099823477, + "learning_rate": 7.225690039184114e-05, + "loss": 1.2334, + "step": 6912 + }, + { + "epoch": 4.462879276952873, + "grad_norm": 1.9137171758394456, + "learning_rate": 7.220165580970898e-05, + "loss": 1.2258, + "step": 6913 + }, + { + "epoch": 4.463524854744997, + "grad_norm": 1.7211364411732009, + "learning_rate": 7.21464256599774e-05, + "loss": 1.1744, + "step": 6914 + }, + { + "epoch": 4.464170432537121, + "grad_norm": 1.5552545781407499, + "learning_rate": 7.209120995289217e-05, + "loss": 0.9541, + "step": 6915 + }, + { + "epoch": 4.464816010329245, + "grad_norm": 1.8362454978879275, + "learning_rate": 7.203600869869637e-05, + "loss": 1.0955, + "step": 6916 + }, + { + "epoch": 4.465461588121369, + "grad_norm": 1.6477249506602019, + "learning_rate": 7.198082190763043e-05, + "loss": 0.9217, + "step": 6917 + }, + { + "epoch": 4.466107165913493, + "grad_norm": 1.6309653530293153, + "learning_rate": 7.192564958993204e-05, + "loss": 1.0423, + "step": 6918 + }, + { + "epoch": 4.466752743705617, + "grad_norm": 1.7622919871535319, + "learning_rate": 7.187049175583626e-05, + "loss": 1.09, + "step": 6919 + }, + { + "epoch": 4.467398321497741, + "grad_norm": 1.7615343511566925, + "learning_rate": 7.181534841557536e-05, + "loss": 1.0139, + "step": 6920 + }, + { + "epoch": 4.468043899289865, + "grad_norm": 1.7137712664441773, + "learning_rate": 7.176021957937922e-05, + "loss": 0.9801, + "step": 6921 + }, + { + "epoch": 4.4686894770819885, + "grad_norm": 1.7293934010343992, + "learning_rate": 7.170510525747459e-05, + "loss": 1.0535, + "step": 6922 + }, + { + "epoch": 4.469335054874112, + "grad_norm": 
1.7825487267374724, + "learning_rate": 7.165000546008577e-05, + "loss": 1.1436, + "step": 6923 + }, + { + "epoch": 4.469980632666236, + "grad_norm": 1.744989871530504, + "learning_rate": 7.159492019743452e-05, + "loss": 0.9422, + "step": 6924 + }, + { + "epoch": 4.47062621045836, + "grad_norm": 1.717969775627302, + "learning_rate": 7.153984947973956e-05, + "loss": 0.9768, + "step": 6925 + }, + { + "epoch": 4.471271788250484, + "grad_norm": 1.9142808521811348, + "learning_rate": 7.148479331721705e-05, + "loss": 1.1279, + "step": 6926 + }, + { + "epoch": 4.471917366042608, + "grad_norm": 1.622524906435612, + "learning_rate": 7.142975172008068e-05, + "loss": 0.9057, + "step": 6927 + }, + { + "epoch": 4.472562943834732, + "grad_norm": 1.7563858033346105, + "learning_rate": 7.1374724698541e-05, + "loss": 1.0787, + "step": 6928 + }, + { + "epoch": 4.473208521626856, + "grad_norm": 1.7775747132624256, + "learning_rate": 7.131971226280628e-05, + "loss": 1.0915, + "step": 6929 + }, + { + "epoch": 4.47385409941898, + "grad_norm": 1.723499411872625, + "learning_rate": 7.126471442308188e-05, + "loss": 1.0059, + "step": 6930 + }, + { + "epoch": 4.474499677211104, + "grad_norm": 1.8487775075961388, + "learning_rate": 7.120973118957028e-05, + "loss": 1.1725, + "step": 6931 + }, + { + "epoch": 4.475145255003228, + "grad_norm": 1.6659268799604392, + "learning_rate": 7.115476257247166e-05, + "loss": 0.9235, + "step": 6932 + }, + { + "epoch": 4.475790832795352, + "grad_norm": 2.0097251095750783, + "learning_rate": 7.109980858198323e-05, + "loss": 1.0808, + "step": 6933 + }, + { + "epoch": 4.476436410587476, + "grad_norm": 1.6743319418744558, + "learning_rate": 7.10448692282994e-05, + "loss": 1.0313, + "step": 6934 + }, + { + "epoch": 4.4770819883796, + "grad_norm": 1.6583516520642483, + "learning_rate": 7.098994452161211e-05, + "loss": 0.983, + "step": 6935 + }, + { + "epoch": 4.4777275661717235, + "grad_norm": 1.7870650592209993, + "learning_rate": 7.093503447211043e-05, + "loss": 0.8814, + "step": 6936 + }, + { + "epoch": 4.4783731439638474, + "grad_norm": 1.7253922785996452, + "learning_rate": 7.088013908998073e-05, + "loss": 1.0436, + "step": 6937 + }, + { + "epoch": 4.479018721755971, + "grad_norm": 1.8206817984696868, + "learning_rate": 7.082525838540669e-05, + "loss": 1.0296, + "step": 6938 + }, + { + "epoch": 4.479664299548095, + "grad_norm": 1.7894731035636022, + "learning_rate": 7.077039236856926e-05, + "loss": 1.1433, + "step": 6939 + }, + { + "epoch": 4.480309877340219, + "grad_norm": 1.6830285863536363, + "learning_rate": 7.071554104964663e-05, + "loss": 1.0113, + "step": 6940 + }, + { + "epoch": 4.480955455132343, + "grad_norm": 1.6842872651125673, + "learning_rate": 7.06607044388143e-05, + "loss": 0.9747, + "step": 6941 + }, + { + "epoch": 4.481601032924467, + "grad_norm": 1.7151073463842246, + "learning_rate": 7.060588254624498e-05, + "loss": 0.9688, + "step": 6942 + }, + { + "epoch": 4.482246610716591, + "grad_norm": 1.7525363045620148, + "learning_rate": 7.055107538210886e-05, + "loss": 1.1041, + "step": 6943 + }, + { + "epoch": 4.482892188508715, + "grad_norm": 1.8018740314058725, + "learning_rate": 7.049628295657307e-05, + "loss": 0.9731, + "step": 6944 + }, + { + "epoch": 4.483537766300839, + "grad_norm": 1.6190791564648472, + "learning_rate": 7.044150527980218e-05, + "loss": 0.8916, + "step": 6945 + }, + { + "epoch": 4.484183344092963, + "grad_norm": 1.8035792390343577, + "learning_rate": 7.03867423619582e-05, + "loss": 1.0299, + "step": 6946 + }, + { + "epoch": 4.484828921885087, + 
"grad_norm": 1.6005147503422823, + "learning_rate": 7.033199421320001e-05, + "loss": 0.9749, + "step": 6947 + }, + { + "epoch": 4.485474499677212, + "grad_norm": 1.9406028050067867, + "learning_rate": 7.027726084368399e-05, + "loss": 1.1071, + "step": 6948 + }, + { + "epoch": 4.4861200774693355, + "grad_norm": 1.6692026443422794, + "learning_rate": 7.022254226356392e-05, + "loss": 0.9828, + "step": 6949 + }, + { + "epoch": 4.486765655261459, + "grad_norm": 1.8322997805249537, + "learning_rate": 7.01678384829905e-05, + "loss": 1.1378, + "step": 6950 + }, + { + "epoch": 4.487411233053583, + "grad_norm": 1.928170505238336, + "learning_rate": 7.011314951211181e-05, + "loss": 1.1817, + "step": 6951 + }, + { + "epoch": 4.488056810845707, + "grad_norm": 1.5332536490028117, + "learning_rate": 7.005847536107343e-05, + "loss": 0.8203, + "step": 6952 + }, + { + "epoch": 4.488702388637831, + "grad_norm": 1.7391250729676015, + "learning_rate": 7.000381604001775e-05, + "loss": 1.0308, + "step": 6953 + }, + { + "epoch": 4.489347966429955, + "grad_norm": 1.6080743505781634, + "learning_rate": 6.994917155908479e-05, + "loss": 1.0491, + "step": 6954 + }, + { + "epoch": 4.489993544222079, + "grad_norm": 1.8768806849857782, + "learning_rate": 6.98945419284117e-05, + "loss": 1.0776, + "step": 6955 + }, + { + "epoch": 4.490639122014203, + "grad_norm": 1.5119911184694343, + "learning_rate": 6.983992715813264e-05, + "loss": 0.8684, + "step": 6956 + }, + { + "epoch": 4.491284699806327, + "grad_norm": 1.8785714796875927, + "learning_rate": 6.978532725837944e-05, + "loss": 1.0686, + "step": 6957 + }, + { + "epoch": 4.491930277598451, + "grad_norm": 1.7658089951507707, + "learning_rate": 6.973074223928087e-05, + "loss": 1.1105, + "step": 6958 + }, + { + "epoch": 4.492575855390575, + "grad_norm": 1.8538722938713188, + "learning_rate": 6.967617211096292e-05, + "loss": 1.1819, + "step": 6959 + }, + { + "epoch": 4.493221433182699, + "grad_norm": 1.7855207674345615, + "learning_rate": 6.962161688354903e-05, + "loss": 1.1146, + "step": 6960 + }, + { + "epoch": 4.493867010974823, + "grad_norm": 1.6844059257066089, + "learning_rate": 6.956707656715976e-05, + "loss": 1.1051, + "step": 6961 + }, + { + "epoch": 4.494512588766947, + "grad_norm": 1.7076519167885087, + "learning_rate": 6.951255117191287e-05, + "loss": 0.9494, + "step": 6962 + }, + { + "epoch": 4.4951581665590705, + "grad_norm": 1.7424240511422653, + "learning_rate": 6.945804070792339e-05, + "loss": 0.9612, + "step": 6963 + }, + { + "epoch": 4.4958037443511945, + "grad_norm": 1.875129439126831, + "learning_rate": 6.940354518530357e-05, + "loss": 1.0869, + "step": 6964 + }, + { + "epoch": 4.496449322143318, + "grad_norm": 1.6676531607595269, + "learning_rate": 6.934906461416291e-05, + "loss": 1.036, + "step": 6965 + }, + { + "epoch": 4.497094899935442, + "grad_norm": 2.2277613520582857, + "learning_rate": 6.929459900460813e-05, + "loss": 1.302, + "step": 6966 + }, + { + "epoch": 4.497740477727566, + "grad_norm": 1.8692900258394762, + "learning_rate": 6.924014836674308e-05, + "loss": 1.0252, + "step": 6967 + }, + { + "epoch": 4.49838605551969, + "grad_norm": 1.98270354253528, + "learning_rate": 6.918571271066912e-05, + "loss": 1.2313, + "step": 6968 + }, + { + "epoch": 4.499031633311814, + "grad_norm": 1.6975119853419787, + "learning_rate": 6.913129204648442e-05, + "loss": 0.9847, + "step": 6969 + }, + { + "epoch": 4.499677211103938, + "grad_norm": 1.777206166389063, + "learning_rate": 6.907688638428463e-05, + "loss": 1.0795, + "step": 6970 + }, + { + "epoch": 
4.500322788896062, + "grad_norm": 1.9454957156047707, + "learning_rate": 6.902249573416268e-05, + "loss": 1.0975, + "step": 6971 + }, + { + "epoch": 4.500968366688186, + "grad_norm": 1.579130057526844, + "learning_rate": 6.896812010620847e-05, + "loss": 0.9279, + "step": 6972 + }, + { + "epoch": 4.50161394448031, + "grad_norm": 1.7454386598409992, + "learning_rate": 6.891375951050924e-05, + "loss": 1.0825, + "step": 6973 + }, + { + "epoch": 4.502259522272434, + "grad_norm": 1.8968680183947344, + "learning_rate": 6.88594139571496e-05, + "loss": 1.159, + "step": 6974 + }, + { + "epoch": 4.502905100064558, + "grad_norm": 1.7306211771135085, + "learning_rate": 6.880508345621105e-05, + "loss": 1.0447, + "step": 6975 + }, + { + "epoch": 4.503550677856682, + "grad_norm": 1.8402452500916537, + "learning_rate": 6.875076801777246e-05, + "loss": 1.1543, + "step": 6976 + }, + { + "epoch": 4.5041962556488055, + "grad_norm": 1.7004350008340663, + "learning_rate": 6.86964676519101e-05, + "loss": 1.11, + "step": 6977 + }, + { + "epoch": 4.5048418334409295, + "grad_norm": 1.8684155339864918, + "learning_rate": 6.864218236869696e-05, + "loss": 1.1014, + "step": 6978 + }, + { + "epoch": 4.505487411233053, + "grad_norm": 1.8813938900666518, + "learning_rate": 6.858791217820378e-05, + "loss": 1.0747, + "step": 6979 + }, + { + "epoch": 4.506132989025177, + "grad_norm": 1.7275414793245782, + "learning_rate": 6.853365709049819e-05, + "loss": 1.0319, + "step": 6980 + }, + { + "epoch": 4.506778566817301, + "grad_norm": 1.582875346309583, + "learning_rate": 6.847941711564491e-05, + "loss": 0.9072, + "step": 6981 + }, + { + "epoch": 4.507424144609425, + "grad_norm": 1.671741973831976, + "learning_rate": 6.842519226370621e-05, + "loss": 1.0097, + "step": 6982 + }, + { + "epoch": 4.508069722401549, + "grad_norm": 1.97860202506099, + "learning_rate": 6.837098254474133e-05, + "loss": 1.0505, + "step": 6983 + }, + { + "epoch": 4.508715300193673, + "grad_norm": 1.9614454438307438, + "learning_rate": 6.831678796880659e-05, + "loss": 1.2317, + "step": 6984 + }, + { + "epoch": 4.509360877985797, + "grad_norm": 1.4926590906161008, + "learning_rate": 6.82626085459558e-05, + "loss": 0.9096, + "step": 6985 + }, + { + "epoch": 4.510006455777921, + "grad_norm": 1.9247729257134782, + "learning_rate": 6.820844428623976e-05, + "loss": 0.9765, + "step": 6986 + }, + { + "epoch": 4.510652033570045, + "grad_norm": 1.7978427145400104, + "learning_rate": 6.81542951997065e-05, + "loss": 1.0539, + "step": 6987 + }, + { + "epoch": 4.511297611362169, + "grad_norm": 1.6801639190724478, + "learning_rate": 6.810016129640126e-05, + "loss": 0.9764, + "step": 6988 + }, + { + "epoch": 4.511943189154293, + "grad_norm": 1.5368737882232193, + "learning_rate": 6.804604258636638e-05, + "loss": 0.9049, + "step": 6989 + }, + { + "epoch": 4.512588766946417, + "grad_norm": 1.8516668437612929, + "learning_rate": 6.79919390796415e-05, + "loss": 0.9911, + "step": 6990 + }, + { + "epoch": 4.513234344738541, + "grad_norm": 1.7057699011554477, + "learning_rate": 6.793785078626338e-05, + "loss": 1.0787, + "step": 6991 + }, + { + "epoch": 4.5138799225306645, + "grad_norm": 1.699890608856272, + "learning_rate": 6.788377771626587e-05, + "loss": 1.0654, + "step": 6992 + }, + { + "epoch": 4.514525500322788, + "grad_norm": 1.8149514713677037, + "learning_rate": 6.782971987968027e-05, + "loss": 1.154, + "step": 6993 + }, + { + "epoch": 4.515171078114912, + "grad_norm": 1.6352451265741645, + "learning_rate": 6.777567728653471e-05, + "loss": 1.0771, + "step": 6994 + }, + { + 
"epoch": 4.515816655907037, + "grad_norm": 1.7626791213204824, + "learning_rate": 6.772164994685463e-05, + "loss": 0.9284, + "step": 6995 + }, + { + "epoch": 4.516462233699161, + "grad_norm": 1.9429495262556693, + "learning_rate": 6.766763787066287e-05, + "loss": 1.0591, + "step": 6996 + }, + { + "epoch": 4.517107811491285, + "grad_norm": 1.7145422897692935, + "learning_rate": 6.761364106797903e-05, + "loss": 1.0085, + "step": 6997 + }, + { + "epoch": 4.517753389283409, + "grad_norm": 1.5652229324692946, + "learning_rate": 6.755965954882008e-05, + "loss": 0.9727, + "step": 6998 + }, + { + "epoch": 4.518398967075533, + "grad_norm": 1.585343482025889, + "learning_rate": 6.750569332320034e-05, + "loss": 1.0421, + "step": 6999 + }, + { + "epoch": 4.519044544867657, + "grad_norm": 1.573945384778509, + "learning_rate": 6.74517424011309e-05, + "loss": 0.9536, + "step": 7000 + }, + { + "epoch": 4.519690122659781, + "grad_norm": 1.970935685543685, + "learning_rate": 6.739780679262025e-05, + "loss": 1.1059, + "step": 7001 + }, + { + "epoch": 4.520335700451905, + "grad_norm": 1.70153067392025, + "learning_rate": 6.734388650767418e-05, + "loss": 0.9635, + "step": 7002 + }, + { + "epoch": 4.520981278244029, + "grad_norm": 1.776110132920144, + "learning_rate": 6.728998155629521e-05, + "loss": 1.1704, + "step": 7003 + }, + { + "epoch": 4.5216268560361526, + "grad_norm": 1.9072367854328294, + "learning_rate": 6.723609194848345e-05, + "loss": 1.0951, + "step": 7004 + }, + { + "epoch": 4.5222724338282765, + "grad_norm": 2.060861612774119, + "learning_rate": 6.718221769423598e-05, + "loss": 1.1302, + "step": 7005 + }, + { + "epoch": 4.5229180116204, + "grad_norm": 1.9621039450607116, + "learning_rate": 6.712835880354685e-05, + "loss": 0.9783, + "step": 7006 + }, + { + "epoch": 4.523563589412524, + "grad_norm": 1.8467687773874808, + "learning_rate": 6.707451528640765e-05, + "loss": 1.1223, + "step": 7007 + }, + { + "epoch": 4.524209167204648, + "grad_norm": 1.8205903983134004, + "learning_rate": 6.702068715280688e-05, + "loss": 1.1087, + "step": 7008 + }, + { + "epoch": 4.524854744996772, + "grad_norm": 1.8386394843776364, + "learning_rate": 6.696687441273006e-05, + "loss": 1.0902, + "step": 7009 + }, + { + "epoch": 4.525500322788896, + "grad_norm": 1.6819622726806258, + "learning_rate": 6.691307707616018e-05, + "loss": 1.0379, + "step": 7010 + }, + { + "epoch": 4.52614590058102, + "grad_norm": 1.8124446117597202, + "learning_rate": 6.685929515307713e-05, + "loss": 1.2313, + "step": 7011 + }, + { + "epoch": 4.526791478373144, + "grad_norm": 1.7900622592947248, + "learning_rate": 6.680552865345804e-05, + "loss": 1.0591, + "step": 7012 + }, + { + "epoch": 4.527437056165268, + "grad_norm": 1.874142754366691, + "learning_rate": 6.675177758727715e-05, + "loss": 1.0661, + "step": 7013 + }, + { + "epoch": 4.528082633957392, + "grad_norm": 1.6629625780039583, + "learning_rate": 6.669804196450584e-05, + "loss": 0.9767, + "step": 7014 + }, + { + "epoch": 4.528728211749516, + "grad_norm": 1.6916280147300309, + "learning_rate": 6.664432179511262e-05, + "loss": 1.0458, + "step": 7015 + }, + { + "epoch": 4.52937378954164, + "grad_norm": 1.6925239443929005, + "learning_rate": 6.659061708906314e-05, + "loss": 1.0463, + "step": 7016 + }, + { + "epoch": 4.530019367333764, + "grad_norm": 1.7738178506851026, + "learning_rate": 6.653692785632011e-05, + "loss": 1.0711, + "step": 7017 + }, + { + "epoch": 4.530664945125888, + "grad_norm": 1.748976687941133, + "learning_rate": 6.648325410684363e-05, + "loss": 1.1421, + "step": 7018 + 
}, + { + "epoch": 4.5313105229180115, + "grad_norm": 1.87991619685323, + "learning_rate": 6.642959585059059e-05, + "loss": 1.0501, + "step": 7019 + }, + { + "epoch": 4.531956100710135, + "grad_norm": 1.7954963196752862, + "learning_rate": 6.637595309751511e-05, + "loss": 1.0189, + "step": 7020 + }, + { + "epoch": 4.532601678502259, + "grad_norm": 1.8255661559795693, + "learning_rate": 6.632232585756866e-05, + "loss": 0.9917, + "step": 7021 + }, + { + "epoch": 4.533247256294383, + "grad_norm": 1.6738856769182315, + "learning_rate": 6.626871414069948e-05, + "loss": 0.9015, + "step": 7022 + }, + { + "epoch": 4.533892834086507, + "grad_norm": 1.7197805186529005, + "learning_rate": 6.621511795685311e-05, + "loss": 1.0551, + "step": 7023 + }, + { + "epoch": 4.534538411878631, + "grad_norm": 1.556270309424585, + "learning_rate": 6.616153731597237e-05, + "loss": 0.923, + "step": 7024 + }, + { + "epoch": 4.535183989670755, + "grad_norm": 1.7036352912868344, + "learning_rate": 6.610797222799685e-05, + "loss": 1.0446, + "step": 7025 + }, + { + "epoch": 4.535829567462879, + "grad_norm": 1.7316176226273392, + "learning_rate": 6.605442270286342e-05, + "loss": 1.0653, + "step": 7026 + }, + { + "epoch": 4.536475145255003, + "grad_norm": 1.499247728913412, + "learning_rate": 6.600088875050625e-05, + "loss": 0.8733, + "step": 7027 + }, + { + "epoch": 4.537120723047127, + "grad_norm": 1.7306123334980748, + "learning_rate": 6.594737038085622e-05, + "loss": 1.0781, + "step": 7028 + }, + { + "epoch": 4.537766300839251, + "grad_norm": 1.607339267060618, + "learning_rate": 6.58938676038417e-05, + "loss": 0.8861, + "step": 7029 + }, + { + "epoch": 4.538411878631375, + "grad_norm": 1.907685122235761, + "learning_rate": 6.584038042938804e-05, + "loss": 1.1008, + "step": 7030 + }, + { + "epoch": 4.539057456423499, + "grad_norm": 1.7245978324610678, + "learning_rate": 6.578690886741748e-05, + "loss": 0.8253, + "step": 7031 + }, + { + "epoch": 4.539703034215623, + "grad_norm": 1.6967717627258632, + "learning_rate": 6.573345292784971e-05, + "loss": 1.0474, + "step": 7032 + }, + { + "epoch": 4.540348612007747, + "grad_norm": 1.6649969585998756, + "learning_rate": 6.568001262060136e-05, + "loss": 1.0895, + "step": 7033 + }, + { + "epoch": 4.540994189799871, + "grad_norm": 1.4857541039417521, + "learning_rate": 6.562658795558602e-05, + "loss": 0.8913, + "step": 7034 + }, + { + "epoch": 4.541639767591995, + "grad_norm": 2.0010614663595376, + "learning_rate": 6.557317894271467e-05, + "loss": 1.1087, + "step": 7035 + }, + { + "epoch": 4.542285345384119, + "grad_norm": 1.7766495768016957, + "learning_rate": 6.551978559189517e-05, + "loss": 1.0436, + "step": 7036 + }, + { + "epoch": 4.542930923176243, + "grad_norm": 1.943040044672367, + "learning_rate": 6.546640791303255e-05, + "loss": 0.9569, + "step": 7037 + }, + { + "epoch": 4.543576500968367, + "grad_norm": 1.8495770423994864, + "learning_rate": 6.541304591602893e-05, + "loss": 0.9335, + "step": 7038 + }, + { + "epoch": 4.544222078760491, + "grad_norm": 1.6901434364076493, + "learning_rate": 6.535969961078353e-05, + "loss": 0.9569, + "step": 7039 + }, + { + "epoch": 4.544867656552615, + "grad_norm": 1.8351022704934863, + "learning_rate": 6.53063690071926e-05, + "loss": 0.9849, + "step": 7040 + }, + { + "epoch": 4.545513234344739, + "grad_norm": 1.6860273811037523, + "learning_rate": 6.525305411514956e-05, + "loss": 0.9234, + "step": 7041 + }, + { + "epoch": 4.546158812136863, + "grad_norm": 1.7516058867529045, + "learning_rate": 6.51997549445448e-05, + "loss": 0.9159, + 
"step": 7042 + }, + { + "epoch": 4.546804389928987, + "grad_norm": 1.6788989791502174, + "learning_rate": 6.514647150526604e-05, + "loss": 1.0387, + "step": 7043 + }, + { + "epoch": 4.547449967721111, + "grad_norm": 1.7437397555916705, + "learning_rate": 6.509320380719774e-05, + "loss": 1.072, + "step": 7044 + }, + { + "epoch": 4.548095545513235, + "grad_norm": 1.5453035829881154, + "learning_rate": 6.503995186022162e-05, + "loss": 1.0168, + "step": 7045 + }, + { + "epoch": 4.5487411233053585, + "grad_norm": 1.8774805135276766, + "learning_rate": 6.498671567421664e-05, + "loss": 1.0329, + "step": 7046 + }, + { + "epoch": 4.549386701097482, + "grad_norm": 1.7760584399565664, + "learning_rate": 6.493349525905849e-05, + "loss": 1.0449, + "step": 7047 + }, + { + "epoch": 4.550032278889606, + "grad_norm": 1.8390802672187456, + "learning_rate": 6.488029062462012e-05, + "loss": 1.0128, + "step": 7048 + }, + { + "epoch": 4.55067785668173, + "grad_norm": 1.9960904387883316, + "learning_rate": 6.482710178077167e-05, + "loss": 1.1326, + "step": 7049 + }, + { + "epoch": 4.551323434473854, + "grad_norm": 1.9881221476690947, + "learning_rate": 6.477392873738006e-05, + "loss": 1.0775, + "step": 7050 + }, + { + "epoch": 4.551969012265978, + "grad_norm": 1.6408821981541106, + "learning_rate": 6.472077150430957e-05, + "loss": 0.9116, + "step": 7051 + }, + { + "epoch": 4.552614590058102, + "grad_norm": 1.7781255103603322, + "learning_rate": 6.466763009142142e-05, + "loss": 1.0244, + "step": 7052 + }, + { + "epoch": 4.553260167850226, + "grad_norm": 2.009804113873138, + "learning_rate": 6.461450450857373e-05, + "loss": 1.2423, + "step": 7053 + }, + { + "epoch": 4.55390574564235, + "grad_norm": 1.9470784228737008, + "learning_rate": 6.456139476562203e-05, + "loss": 0.9823, + "step": 7054 + }, + { + "epoch": 4.554551323434474, + "grad_norm": 1.8246380463707448, + "learning_rate": 6.45083008724187e-05, + "loss": 1.0591, + "step": 7055 + }, + { + "epoch": 4.555196901226598, + "grad_norm": 1.6155603483451682, + "learning_rate": 6.445522283881308e-05, + "loss": 0.955, + "step": 7056 + }, + { + "epoch": 4.555842479018722, + "grad_norm": 1.7940080124366777, + "learning_rate": 6.440216067465183e-05, + "loss": 1.0969, + "step": 7057 + }, + { + "epoch": 4.556488056810846, + "grad_norm": 2.174349506284703, + "learning_rate": 6.434911438977855e-05, + "loss": 0.9646, + "step": 7058 + }, + { + "epoch": 4.55713363460297, + "grad_norm": 1.6327826541363386, + "learning_rate": 6.429608399403369e-05, + "loss": 0.9019, + "step": 7059 + }, + { + "epoch": 4.5577792123950935, + "grad_norm": 1.636674334829335, + "learning_rate": 6.424306949725513e-05, + "loss": 0.9632, + "step": 7060 + }, + { + "epoch": 4.5584247901872175, + "grad_norm": 1.6094263198600036, + "learning_rate": 6.419007090927755e-05, + "loss": 0.9855, + "step": 7061 + }, + { + "epoch": 4.559070367979341, + "grad_norm": 1.8548084180011113, + "learning_rate": 6.413708823993272e-05, + "loss": 0.9846, + "step": 7062 + }, + { + "epoch": 4.559715945771465, + "grad_norm": 1.7580036089505324, + "learning_rate": 6.40841214990495e-05, + "loss": 0.974, + "step": 7063 + }, + { + "epoch": 4.560361523563589, + "grad_norm": 1.8485230606031922, + "learning_rate": 6.403117069645375e-05, + "loss": 1.0697, + "step": 7064 + }, + { + "epoch": 4.561007101355713, + "grad_norm": 1.6117058529401973, + "learning_rate": 6.39782358419684e-05, + "loss": 0.9946, + "step": 7065 + }, + { + "epoch": 4.561652679147837, + "grad_norm": 1.7851854914215222, + "learning_rate": 6.392531694541342e-05, + 
"loss": 1.0491, + "step": 7066 + }, + { + "epoch": 4.562298256939961, + "grad_norm": 2.0640538145768677, + "learning_rate": 6.387241401660575e-05, + "loss": 1.2963, + "step": 7067 + }, + { + "epoch": 4.562943834732085, + "grad_norm": 1.7681597821614874, + "learning_rate": 6.38195270653596e-05, + "loss": 0.9738, + "step": 7068 + }, + { + "epoch": 4.563589412524209, + "grad_norm": 2.1309659655847994, + "learning_rate": 6.37666561014859e-05, + "loss": 1.3656, + "step": 7069 + }, + { + "epoch": 4.564234990316333, + "grad_norm": 1.6349769471084625, + "learning_rate": 6.371380113479273e-05, + "loss": 0.9544, + "step": 7070 + }, + { + "epoch": 4.564880568108457, + "grad_norm": 1.7780917037506632, + "learning_rate": 6.366096217508544e-05, + "loss": 0.9305, + "step": 7071 + }, + { + "epoch": 4.565526145900581, + "grad_norm": 2.0328400247602567, + "learning_rate": 6.360813923216602e-05, + "loss": 1.1894, + "step": 7072 + }, + { + "epoch": 4.566171723692705, + "grad_norm": 1.7357780949873225, + "learning_rate": 6.355533231583366e-05, + "loss": 0.9841, + "step": 7073 + }, + { + "epoch": 4.566817301484829, + "grad_norm": 1.6892453919090156, + "learning_rate": 6.350254143588479e-05, + "loss": 0.9003, + "step": 7074 + }, + { + "epoch": 4.5674628792769525, + "grad_norm": 1.7319333174880558, + "learning_rate": 6.34497666021124e-05, + "loss": 1.1061, + "step": 7075 + }, + { + "epoch": 4.568108457069076, + "grad_norm": 1.693975855888013, + "learning_rate": 6.339700782430699e-05, + "loss": 1.0504, + "step": 7076 + }, + { + "epoch": 4.5687540348612, + "grad_norm": 1.704401111042301, + "learning_rate": 6.334426511225582e-05, + "loss": 0.9647, + "step": 7077 + }, + { + "epoch": 4.569399612653324, + "grad_norm": 1.6391246789762635, + "learning_rate": 6.329153847574305e-05, + "loss": 1.0115, + "step": 7078 + }, + { + "epoch": 4.570045190445448, + "grad_norm": 2.519388644675176, + "learning_rate": 6.323882792455021e-05, + "loss": 0.8739, + "step": 7079 + }, + { + "epoch": 4.570690768237572, + "grad_norm": 1.912514688530558, + "learning_rate": 6.318613346845562e-05, + "loss": 1.1821, + "step": 7080 + }, + { + "epoch": 4.571336346029697, + "grad_norm": 1.7427560941615658, + "learning_rate": 6.313345511723453e-05, + "loss": 1.0482, + "step": 7081 + }, + { + "epoch": 4.571981923821821, + "grad_norm": 1.8529173679858821, + "learning_rate": 6.308079288065942e-05, + "loss": 1.1547, + "step": 7082 + }, + { + "epoch": 4.572627501613945, + "grad_norm": 1.9388351767012937, + "learning_rate": 6.302814676849973e-05, + "loss": 0.9755, + "step": 7083 + }, + { + "epoch": 4.573273079406069, + "grad_norm": 1.8924458079426714, + "learning_rate": 6.29755167905217e-05, + "loss": 1.1004, + "step": 7084 + }, + { + "epoch": 4.573918657198193, + "grad_norm": 1.8281948694652144, + "learning_rate": 6.292290295648887e-05, + "loss": 0.9298, + "step": 7085 + }, + { + "epoch": 4.574564234990317, + "grad_norm": 1.8754054225350036, + "learning_rate": 6.287030527616161e-05, + "loss": 1.0795, + "step": 7086 + }, + { + "epoch": 4.5752098127824405, + "grad_norm": 2.1777060331095375, + "learning_rate": 6.281772375929735e-05, + "loss": 1.0046, + "step": 7087 + }, + { + "epoch": 4.5758553905745645, + "grad_norm": 1.743249752675348, + "learning_rate": 6.27651584156505e-05, + "loss": 0.9971, + "step": 7088 + }, + { + "epoch": 4.576500968366688, + "grad_norm": 1.848157797612322, + "learning_rate": 6.271260925497245e-05, + "loss": 0.832, + "step": 7089 + }, + { + "epoch": 4.577146546158812, + "grad_norm": 1.5747849752275467, + "learning_rate": 
6.266007628701166e-05, + "loss": 0.9673, + "step": 7090 + }, + { + "epoch": 4.577792123950936, + "grad_norm": 1.805143185318996, + "learning_rate": 6.260755952151352e-05, + "loss": 1.0052, + "step": 7091 + }, + { + "epoch": 4.57843770174306, + "grad_norm": 1.9294139739277067, + "learning_rate": 6.255505896822036e-05, + "loss": 1.1207, + "step": 7092 + }, + { + "epoch": 4.579083279535184, + "grad_norm": 1.616822762631443, + "learning_rate": 6.250257463687177e-05, + "loss": 0.9374, + "step": 7093 + }, + { + "epoch": 4.579728857327308, + "grad_norm": 1.5365765278440375, + "learning_rate": 6.245010653720399e-05, + "loss": 0.8524, + "step": 7094 + }, + { + "epoch": 4.580374435119432, + "grad_norm": 1.869448326692014, + "learning_rate": 6.239765467895035e-05, + "loss": 1.1485, + "step": 7095 + }, + { + "epoch": 4.581020012911556, + "grad_norm": 1.7842719432389964, + "learning_rate": 6.23452190718414e-05, + "loss": 1.0621, + "step": 7096 + }, + { + "epoch": 4.58166559070368, + "grad_norm": 1.927674386998472, + "learning_rate": 6.229279972560434e-05, + "loss": 1.1237, + "step": 7097 + }, + { + "epoch": 4.582311168495804, + "grad_norm": 1.5725565255503409, + "learning_rate": 6.224039664996347e-05, + "loss": 0.9803, + "step": 7098 + }, + { + "epoch": 4.582956746287928, + "grad_norm": 2.34145381020012, + "learning_rate": 6.218800985464029e-05, + "loss": 1.0565, + "step": 7099 + }, + { + "epoch": 4.583602324080052, + "grad_norm": 2.1101454994758537, + "learning_rate": 6.21356393493529e-05, + "loss": 1.1649, + "step": 7100 + }, + { + "epoch": 4.584247901872176, + "grad_norm": 1.718307380321281, + "learning_rate": 6.20832851438167e-05, + "loss": 1.0013, + "step": 7101 + }, + { + "epoch": 4.5848934796642995, + "grad_norm": 1.657230123795402, + "learning_rate": 6.203094724774393e-05, + "loss": 1.073, + "step": 7102 + }, + { + "epoch": 4.585539057456423, + "grad_norm": 1.7552691306145034, + "learning_rate": 6.19786256708437e-05, + "loss": 0.9955, + "step": 7103 + }, + { + "epoch": 4.586184635248547, + "grad_norm": 1.7022571467903604, + "learning_rate": 6.192632042282231e-05, + "loss": 1.0074, + "step": 7104 + }, + { + "epoch": 4.586830213040671, + "grad_norm": 1.6399647162565014, + "learning_rate": 6.187403151338297e-05, + "loss": 0.9378, + "step": 7105 + }, + { + "epoch": 4.587475790832795, + "grad_norm": 1.7287085529374933, + "learning_rate": 6.182175895222564e-05, + "loss": 1.0503, + "step": 7106 + }, + { + "epoch": 4.588121368624919, + "grad_norm": 1.8584464915716634, + "learning_rate": 6.176950274904758e-05, + "loss": 1.0905, + "step": 7107 + }, + { + "epoch": 4.588766946417043, + "grad_norm": 1.6930266228663295, + "learning_rate": 6.171726291354286e-05, + "loss": 1.0375, + "step": 7108 + }, + { + "epoch": 4.589412524209167, + "grad_norm": 1.8161142887028703, + "learning_rate": 6.166503945540234e-05, + "loss": 0.9719, + "step": 7109 + }, + { + "epoch": 4.590058102001291, + "grad_norm": 1.6684374027021822, + "learning_rate": 6.16128323843142e-05, + "loss": 0.9732, + "step": 7110 + }, + { + "epoch": 4.590703679793415, + "grad_norm": 1.8902716076565031, + "learning_rate": 6.156064170996332e-05, + "loss": 1.2356, + "step": 7111 + }, + { + "epoch": 4.591349257585539, + "grad_norm": 1.8222541068941809, + "learning_rate": 6.150846744203164e-05, + "loss": 1.0979, + "step": 7112 + }, + { + "epoch": 4.591994835377663, + "grad_norm": 1.977050858281975, + "learning_rate": 6.145630959019798e-05, + "loss": 1.0806, + "step": 7113 + }, + { + "epoch": 4.592640413169787, + "grad_norm": 1.6598523902693936, + 
"learning_rate": 6.140416816413818e-05, + "loss": 0.9644, + "step": 7114 + }, + { + "epoch": 4.593285990961911, + "grad_norm": 1.5329364247395716, + "learning_rate": 6.135204317352505e-05, + "loss": 0.9539, + "step": 7115 + }, + { + "epoch": 4.5939315687540345, + "grad_norm": 1.6968969368413709, + "learning_rate": 6.129993462802829e-05, + "loss": 0.9991, + "step": 7116 + }, + { + "epoch": 4.5945771465461585, + "grad_norm": 1.5693756307483264, + "learning_rate": 6.124784253731452e-05, + "loss": 0.9352, + "step": 7117 + }, + { + "epoch": 4.595222724338282, + "grad_norm": 2.0199380405984866, + "learning_rate": 6.119576691104751e-05, + "loss": 1.1332, + "step": 7118 + }, + { + "epoch": 4.595868302130407, + "grad_norm": 1.9717292182876283, + "learning_rate": 6.114370775888772e-05, + "loss": 1.186, + "step": 7119 + }, + { + "epoch": 4.596513879922531, + "grad_norm": 1.6535413168940567, + "learning_rate": 6.109166509049261e-05, + "loss": 0.9642, + "step": 7120 + }, + { + "epoch": 4.597159457714655, + "grad_norm": 1.6264469204966168, + "learning_rate": 6.103963891551683e-05, + "loss": 0.8629, + "step": 7121 + }, + { + "epoch": 4.597805035506779, + "grad_norm": 1.6492203024515133, + "learning_rate": 6.09876292436116e-05, + "loss": 0.9809, + "step": 7122 + }, + { + "epoch": 4.598450613298903, + "grad_norm": 1.7495377147721276, + "learning_rate": 6.093563608442526e-05, + "loss": 1.1217, + "step": 7123 + }, + { + "epoch": 4.599096191091027, + "grad_norm": 1.807499277246201, + "learning_rate": 6.088365944760323e-05, + "loss": 1.0504, + "step": 7124 + }, + { + "epoch": 4.599741768883151, + "grad_norm": 1.994689431158885, + "learning_rate": 6.083169934278751e-05, + "loss": 0.9123, + "step": 7125 + }, + { + "epoch": 4.600387346675275, + "grad_norm": 1.8134403458964725, + "learning_rate": 6.077975577961739e-05, + "loss": 1.0321, + "step": 7126 + }, + { + "epoch": 4.601032924467399, + "grad_norm": 1.7769924413149025, + "learning_rate": 6.0727828767728974e-05, + "loss": 1.0534, + "step": 7127 + }, + { + "epoch": 4.601678502259523, + "grad_norm": 1.8785153195635702, + "learning_rate": 6.067591831675506e-05, + "loss": 1.1339, + "step": 7128 + }, + { + "epoch": 4.6023240800516465, + "grad_norm": 1.831432193953624, + "learning_rate": 6.062402443632575e-05, + "loss": 1.1915, + "step": 7129 + }, + { + "epoch": 4.60296965784377, + "grad_norm": 1.7193400886002825, + "learning_rate": 6.057214713606791e-05, + "loss": 1.0355, + "step": 7130 + }, + { + "epoch": 4.603615235635894, + "grad_norm": 1.5694901348346915, + "learning_rate": 6.052028642560514e-05, + "loss": 0.9428, + "step": 7131 + }, + { + "epoch": 4.604260813428018, + "grad_norm": 1.881561989121505, + "learning_rate": 6.0468442314558316e-05, + "loss": 1.1054, + "step": 7132 + }, + { + "epoch": 4.604906391220142, + "grad_norm": 2.234362599505628, + "learning_rate": 6.041661481254498e-05, + "loss": 1.1251, + "step": 7133 + }, + { + "epoch": 4.605551969012266, + "grad_norm": 1.6002303199998487, + "learning_rate": 6.036480392917971e-05, + "loss": 0.9109, + "step": 7134 + }, + { + "epoch": 4.60619754680439, + "grad_norm": 1.687439247781763, + "learning_rate": 6.0313009674073944e-05, + "loss": 0.9053, + "step": 7135 + }, + { + "epoch": 4.606843124596514, + "grad_norm": 1.5094559996894792, + "learning_rate": 6.026123205683604e-05, + "loss": 0.8621, + "step": 7136 + }, + { + "epoch": 4.607488702388638, + "grad_norm": 2.0186661727834996, + "learning_rate": 6.020947108707129e-05, + "loss": 1.1927, + "step": 7137 + }, + { + "epoch": 4.608134280180762, + "grad_norm": 
2.0422980529169865, + "learning_rate": 6.01577267743819e-05, + "loss": 1.0638, + "step": 7138 + }, + { + "epoch": 4.608779857972886, + "grad_norm": 1.6735772361846275, + "learning_rate": 6.0105999128367e-05, + "loss": 1.0478, + "step": 7139 + }, + { + "epoch": 4.60942543576501, + "grad_norm": 1.739083974195259, + "learning_rate": 6.005428815862255e-05, + "loss": 1.1308, + "step": 7140 + }, + { + "epoch": 4.610071013557134, + "grad_norm": 1.7954741924790825, + "learning_rate": 6.000259387474151e-05, + "loss": 1.0877, + "step": 7141 + }, + { + "epoch": 4.610716591349258, + "grad_norm": 2.4065297468666746, + "learning_rate": 5.995091628631364e-05, + "loss": 1.0414, + "step": 7142 + }, + { + "epoch": 4.6113621691413815, + "grad_norm": 1.8192326453681236, + "learning_rate": 5.989925540292584e-05, + "loss": 1.0514, + "step": 7143 + }, + { + "epoch": 4.6120077469335055, + "grad_norm": 1.7148130722449797, + "learning_rate": 5.984761123416158e-05, + "loss": 1.0716, + "step": 7144 + }, + { + "epoch": 4.612653324725629, + "grad_norm": 1.778330327850945, + "learning_rate": 5.979598378960137e-05, + "loss": 1.0954, + "step": 7145 + }, + { + "epoch": 4.613298902517753, + "grad_norm": 1.6207863571490582, + "learning_rate": 5.974437307882282e-05, + "loss": 1.0916, + "step": 7146 + }, + { + "epoch": 4.613944480309877, + "grad_norm": 1.6533812096143998, + "learning_rate": 5.9692779111400094e-05, + "loss": 0.9602, + "step": 7147 + }, + { + "epoch": 4.614590058102001, + "grad_norm": 1.507810565971133, + "learning_rate": 5.9641201896904374e-05, + "loss": 0.9819, + "step": 7148 + }, + { + "epoch": 4.615235635894125, + "grad_norm": 1.8417808606901023, + "learning_rate": 5.958964144490396e-05, + "loss": 1.1095, + "step": 7149 + }, + { + "epoch": 4.615881213686249, + "grad_norm": 1.8263845496880338, + "learning_rate": 5.9538097764963644e-05, + "loss": 1.0421, + "step": 7150 + }, + { + "epoch": 4.616526791478373, + "grad_norm": 2.574526289502044, + "learning_rate": 5.948657086664545e-05, + "loss": 1.0242, + "step": 7151 + }, + { + "epoch": 4.617172369270497, + "grad_norm": 1.5503036762362268, + "learning_rate": 5.943506075950815e-05, + "loss": 1.0069, + "step": 7152 + }, + { + "epoch": 4.617817947062621, + "grad_norm": 1.7905538785903736, + "learning_rate": 5.938356745310727e-05, + "loss": 0.9347, + "step": 7153 + }, + { + "epoch": 4.618463524854745, + "grad_norm": 2.040004582138679, + "learning_rate": 5.93320909569955e-05, + "loss": 1.1996, + "step": 7154 + }, + { + "epoch": 4.619109102646869, + "grad_norm": 1.7412798969975158, + "learning_rate": 5.928063128072227e-05, + "loss": 0.9805, + "step": 7155 + }, + { + "epoch": 4.619754680438993, + "grad_norm": 1.7997426890397563, + "learning_rate": 5.9229188433833714e-05, + "loss": 1.1189, + "step": 7156 + }, + { + "epoch": 4.6204002582311166, + "grad_norm": 1.9871196629830261, + "learning_rate": 5.9177762425873174e-05, + "loss": 1.1803, + "step": 7157 + }, + { + "epoch": 4.6210458360232405, + "grad_norm": 1.562849464631294, + "learning_rate": 5.912635326638067e-05, + "loss": 1.0029, + "step": 7158 + }, + { + "epoch": 4.621691413815364, + "grad_norm": 1.7941598670814294, + "learning_rate": 5.9074960964893106e-05, + "loss": 1.1082, + "step": 7159 + }, + { + "epoch": 4.622336991607488, + "grad_norm": 1.880159408480632, + "learning_rate": 5.902358553094431e-05, + "loss": 1.082, + "step": 7160 + }, + { + "epoch": 4.622982569399612, + "grad_norm": 1.7560632520822168, + "learning_rate": 5.897222697406497e-05, + "loss": 1.161, + "step": 7161 + }, + { + "epoch": 
4.623628147191736, + "grad_norm": 1.7485452333305664, + "learning_rate": 5.89208853037826e-05, + "loss": 1.124, + "step": 7162 + }, + { + "epoch": 4.62427372498386, + "grad_norm": 1.7958882062308557, + "learning_rate": 5.8869560529621635e-05, + "loss": 1.0145, + "step": 7163 + }, + { + "epoch": 4.624919302775984, + "grad_norm": 1.9273851286919166, + "learning_rate": 5.8818252661103335e-05, + "loss": 0.9565, + "step": 7164 + }, + { + "epoch": 4.625564880568108, + "grad_norm": 1.7290334463897834, + "learning_rate": 5.876696170774586e-05, + "loss": 0.9968, + "step": 7165 + }, + { + "epoch": 4.626210458360232, + "grad_norm": 1.7844784161922405, + "learning_rate": 5.8715687679064215e-05, + "loss": 1.0904, + "step": 7166 + }, + { + "epoch": 4.626856036152357, + "grad_norm": 1.8136747303120293, + "learning_rate": 5.86644305845702e-05, + "loss": 1.0893, + "step": 7167 + }, + { + "epoch": 4.627501613944481, + "grad_norm": 1.9021479248812674, + "learning_rate": 5.8613190433772724e-05, + "loss": 0.9599, + "step": 7168 + }, + { + "epoch": 4.628147191736605, + "grad_norm": 1.9323349134493708, + "learning_rate": 5.856196723617716e-05, + "loss": 1.0405, + "step": 7169 + }, + { + "epoch": 4.6287927695287285, + "grad_norm": 1.7608120429461205, + "learning_rate": 5.851076100128598e-05, + "loss": 1.042, + "step": 7170 + }, + { + "epoch": 4.6294383473208525, + "grad_norm": 1.628684030010018, + "learning_rate": 5.845957173859863e-05, + "loss": 0.8652, + "step": 7171 + }, + { + "epoch": 4.630083925112976, + "grad_norm": 1.899936595062756, + "learning_rate": 5.8408399457611066e-05, + "loss": 1.2314, + "step": 7172 + }, + { + "epoch": 4.6307295029051, + "grad_norm": 1.8523258980301862, + "learning_rate": 5.835724416781629e-05, + "loss": 1.0628, + "step": 7173 + }, + { + "epoch": 4.631375080697224, + "grad_norm": 1.8779938222323698, + "learning_rate": 5.830610587870431e-05, + "loss": 1.1229, + "step": 7174 + }, + { + "epoch": 4.632020658489348, + "grad_norm": 1.720690421060392, + "learning_rate": 5.825498459976159e-05, + "loss": 1.0351, + "step": 7175 + }, + { + "epoch": 4.632666236281472, + "grad_norm": 1.8237349495332191, + "learning_rate": 5.8203880340471804e-05, + "loss": 0.9305, + "step": 7176 + }, + { + "epoch": 4.633311814073596, + "grad_norm": 1.57455714286523, + "learning_rate": 5.815279311031535e-05, + "loss": 0.9279, + "step": 7177 + }, + { + "epoch": 4.63395739186572, + "grad_norm": 1.8389576014710263, + "learning_rate": 5.810172291876926e-05, + "loss": 1.0377, + "step": 7178 + }, + { + "epoch": 4.634602969657844, + "grad_norm": 1.6729416492795521, + "learning_rate": 5.805066977530775e-05, + "loss": 1.0178, + "step": 7179 + }, + { + "epoch": 4.635248547449968, + "grad_norm": 1.727366288174452, + "learning_rate": 5.79996336894017e-05, + "loss": 1.0294, + "step": 7180 + }, + { + "epoch": 4.635894125242092, + "grad_norm": 1.6167228204582444, + "learning_rate": 5.7948614670518695e-05, + "loss": 0.9879, + "step": 7181 + }, + { + "epoch": 4.636539703034216, + "grad_norm": 1.8000701743578638, + "learning_rate": 5.789761272812343e-05, + "loss": 1.078, + "step": 7182 + }, + { + "epoch": 4.63718528082634, + "grad_norm": 1.8241195843196152, + "learning_rate": 5.7846627871677245e-05, + "loss": 0.9457, + "step": 7183 + }, + { + "epoch": 4.637830858618464, + "grad_norm": 1.8206433414052574, + "learning_rate": 5.7795660110638377e-05, + "loss": 1.0592, + "step": 7184 + }, + { + "epoch": 4.6384764364105875, + "grad_norm": 1.6282537829881985, + "learning_rate": 5.7744709454461854e-05, + "loss": 0.88, + "step": 7185 + }, 
+ { + "epoch": 4.639122014202711, + "grad_norm": 1.6914676943171214, + "learning_rate": 5.7693775912599563e-05, + "loss": 1.2182, + "step": 7186 + }, + { + "epoch": 4.639767591994835, + "grad_norm": 1.5922184525815941, + "learning_rate": 5.764285949450021e-05, + "loss": 0.9685, + "step": 7187 + }, + { + "epoch": 4.640413169786959, + "grad_norm": 1.5838192826484818, + "learning_rate": 5.759196020960931e-05, + "loss": 0.8673, + "step": 7188 + }, + { + "epoch": 4.641058747579083, + "grad_norm": 1.7604340384059447, + "learning_rate": 5.754107806736921e-05, + "loss": 0.9733, + "step": 7189 + }, + { + "epoch": 4.641704325371207, + "grad_norm": 2.4232115931889804, + "learning_rate": 5.749021307721908e-05, + "loss": 1.1805, + "step": 7190 + }, + { + "epoch": 4.642349903163331, + "grad_norm": 1.5568138157030789, + "learning_rate": 5.7439365248594895e-05, + "loss": 1.0012, + "step": 7191 + }, + { + "epoch": 4.642995480955455, + "grad_norm": 1.7612611051716478, + "learning_rate": 5.73885345909294e-05, + "loss": 0.981, + "step": 7192 + }, + { + "epoch": 4.643641058747579, + "grad_norm": 1.9884777184081062, + "learning_rate": 5.733772111365238e-05, + "loss": 0.9841, + "step": 7193 + }, + { + "epoch": 4.644286636539703, + "grad_norm": 1.7419851925426517, + "learning_rate": 5.728692482619012e-05, + "loss": 0.9862, + "step": 7194 + }, + { + "epoch": 4.644932214331827, + "grad_norm": 2.0327468618513924, + "learning_rate": 5.723614573796582e-05, + "loss": 1.2369, + "step": 7195 + }, + { + "epoch": 4.645577792123951, + "grad_norm": 1.82908739097841, + "learning_rate": 5.7185383858399706e-05, + "loss": 1.1313, + "step": 7196 + }, + { + "epoch": 4.646223369916075, + "grad_norm": 1.5058472953911064, + "learning_rate": 5.7134639196908475e-05, + "loss": 1.0138, + "step": 7197 + }, + { + "epoch": 4.646868947708199, + "grad_norm": 2.0199404845524533, + "learning_rate": 5.70839117629058e-05, + "loss": 1.148, + "step": 7198 + }, + { + "epoch": 4.6475145255003225, + "grad_norm": 1.7286291250916717, + "learning_rate": 5.703320156580228e-05, + "loss": 0.987, + "step": 7199 + }, + { + "epoch": 4.648160103292446, + "grad_norm": 1.877298679630007, + "learning_rate": 5.698250861500499e-05, + "loss": 1.2712, + "step": 7200 + }, + { + "epoch": 4.64880568108457, + "grad_norm": 1.793259401825753, + "learning_rate": 5.6931832919918144e-05, + "loss": 1.1665, + "step": 7201 + }, + { + "epoch": 4.649451258876694, + "grad_norm": 1.8359463398211473, + "learning_rate": 5.688117448994263e-05, + "loss": 1.0518, + "step": 7202 + }, + { + "epoch": 4.650096836668818, + "grad_norm": 1.6932860206139622, + "learning_rate": 5.683053333447593e-05, + "loss": 1.0278, + "step": 7203 + }, + { + "epoch": 4.650742414460942, + "grad_norm": 1.6817049214070046, + "learning_rate": 5.6779909462912694e-05, + "loss": 0.9155, + "step": 7204 + }, + { + "epoch": 4.651387992253067, + "grad_norm": 1.5568426276654586, + "learning_rate": 5.6729302884644135e-05, + "loss": 0.94, + "step": 7205 + }, + { + "epoch": 4.652033570045191, + "grad_norm": 1.7770232559088535, + "learning_rate": 5.667871360905818e-05, + "loss": 0.9437, + "step": 7206 + }, + { + "epoch": 4.652679147837315, + "grad_norm": 1.625099769243703, + "learning_rate": 5.662814164553979e-05, + "loss": 1.0428, + "step": 7207 + }, + { + "epoch": 4.653324725629439, + "grad_norm": 1.8399155026612983, + "learning_rate": 5.6577587003470555e-05, + "loss": 0.9142, + "step": 7208 + }, + { + "epoch": 4.653970303421563, + "grad_norm": 2.247831527174405, + "learning_rate": 5.652704969222889e-05, + "loss": 1.0686, + 
"step": 7209 + }, + { + "epoch": 4.654615881213687, + "grad_norm": 1.5345261734987865, + "learning_rate": 5.6476529721189974e-05, + "loss": 0.9362, + "step": 7210 + }, + { + "epoch": 4.655261459005811, + "grad_norm": 1.7986013382562114, + "learning_rate": 5.6426027099725795e-05, + "loss": 1.0336, + "step": 7211 + }, + { + "epoch": 4.6559070367979345, + "grad_norm": 1.8337106679825332, + "learning_rate": 5.637554183720512e-05, + "loss": 1.1137, + "step": 7212 + }, + { + "epoch": 4.656552614590058, + "grad_norm": 1.9561837808443172, + "learning_rate": 5.632507394299348e-05, + "loss": 1.0628, + "step": 7213 + }, + { + "epoch": 4.657198192382182, + "grad_norm": 1.6670027399178342, + "learning_rate": 5.627462342645321e-05, + "loss": 0.9355, + "step": 7214 + }, + { + "epoch": 4.657843770174306, + "grad_norm": 1.7893235742018039, + "learning_rate": 5.622419029694338e-05, + "loss": 0.9989, + "step": 7215 + }, + { + "epoch": 4.65848934796643, + "grad_norm": 1.7855351261132673, + "learning_rate": 5.6173774563819875e-05, + "loss": 1.0188, + "step": 7216 + }, + { + "epoch": 4.659134925758554, + "grad_norm": 1.7913113413764925, + "learning_rate": 5.612337623643527e-05, + "loss": 1.0829, + "step": 7217 + }, + { + "epoch": 4.659780503550678, + "grad_norm": 1.5152417246209415, + "learning_rate": 5.6072995324139156e-05, + "loss": 0.9389, + "step": 7218 + }, + { + "epoch": 4.660426081342802, + "grad_norm": 1.7054063197424574, + "learning_rate": 5.602263183627753e-05, + "loss": 1.0717, + "step": 7219 + }, + { + "epoch": 4.661071659134926, + "grad_norm": 1.5968284584342942, + "learning_rate": 5.597228578219338e-05, + "loss": 1.0463, + "step": 7220 + }, + { + "epoch": 4.66171723692705, + "grad_norm": 1.8616131457482157, + "learning_rate": 5.592195717122655e-05, + "loss": 1.0975, + "step": 7221 + }, + { + "epoch": 4.662362814719174, + "grad_norm": 1.896104138721, + "learning_rate": 5.587164601271337e-05, + "loss": 1.1642, + "step": 7222 + }, + { + "epoch": 4.663008392511298, + "grad_norm": 1.6607800825717298, + "learning_rate": 5.582135231598708e-05, + "loss": 1.0476, + "step": 7223 + }, + { + "epoch": 4.663653970303422, + "grad_norm": 1.7245764070017702, + "learning_rate": 5.577107609037785e-05, + "loss": 1.1292, + "step": 7224 + }, + { + "epoch": 4.664299548095546, + "grad_norm": 1.9243217244580546, + "learning_rate": 5.572081734521223e-05, + "loss": 1.0791, + "step": 7225 + }, + { + "epoch": 4.6649451258876695, + "grad_norm": 1.653660523463643, + "learning_rate": 5.567057608981388e-05, + "loss": 1.0488, + "step": 7226 + }, + { + "epoch": 4.6655907036797934, + "grad_norm": 1.9110235634014892, + "learning_rate": 5.5620352333503105e-05, + "loss": 1.0826, + "step": 7227 + }, + { + "epoch": 4.666236281471917, + "grad_norm": 2.0098122942037073, + "learning_rate": 5.557014608559674e-05, + "loss": 1.0675, + "step": 7228 + }, + { + "epoch": 4.666881859264041, + "grad_norm": 1.7734520403096197, + "learning_rate": 5.5519957355408736e-05, + "loss": 1.0136, + "step": 7229 + }, + { + "epoch": 4.667527437056165, + "grad_norm": 1.761947299427667, + "learning_rate": 5.546978615224964e-05, + "loss": 1.0575, + "step": 7230 + }, + { + "epoch": 4.668173014848289, + "grad_norm": 1.839350328218947, + "learning_rate": 5.541963248542656e-05, + "loss": 1.0349, + "step": 7231 + }, + { + "epoch": 4.668818592640413, + "grad_norm": 1.8153746056444617, + "learning_rate": 5.536949636424368e-05, + "loss": 1.1637, + "step": 7232 + }, + { + "epoch": 4.669464170432537, + "grad_norm": 1.8801636607813186, + "learning_rate": 
5.531937779800171e-05, + "loss": 1.0068, + "step": 7233 + }, + { + "epoch": 4.670109748224661, + "grad_norm": 1.8936415758402643, + "learning_rate": 5.5269276795998186e-05, + "loss": 1.1595, + "step": 7234 + }, + { + "epoch": 4.670755326016785, + "grad_norm": 1.7850641234781495, + "learning_rate": 5.521919336752734e-05, + "loss": 1.0711, + "step": 7235 + }, + { + "epoch": 4.671400903808909, + "grad_norm": 1.5157610845774743, + "learning_rate": 5.516912752188018e-05, + "loss": 0.9551, + "step": 7236 + }, + { + "epoch": 4.672046481601033, + "grad_norm": 1.6197273387613915, + "learning_rate": 5.511907926834444e-05, + "loss": 0.9733, + "step": 7237 + }, + { + "epoch": 4.672692059393157, + "grad_norm": 1.5242054008766317, + "learning_rate": 5.50690486162046e-05, + "loss": 0.8775, + "step": 7238 + }, + { + "epoch": 4.673337637185281, + "grad_norm": 1.7057444055628521, + "learning_rate": 5.501903557474179e-05, + "loss": 0.9704, + "step": 7239 + }, + { + "epoch": 4.6739832149774045, + "grad_norm": 1.9141364741648177, + "learning_rate": 5.496904015323413e-05, + "loss": 1.1292, + "step": 7240 + }, + { + "epoch": 4.6746287927695285, + "grad_norm": 1.7897638339847632, + "learning_rate": 5.4919062360956114e-05, + "loss": 0.9313, + "step": 7241 + }, + { + "epoch": 4.675274370561652, + "grad_norm": 1.7913454642030702, + "learning_rate": 5.486910220717914e-05, + "loss": 0.9405, + "step": 7242 + }, + { + "epoch": 4.675919948353776, + "grad_norm": 1.586402610683998, + "learning_rate": 5.4819159701171515e-05, + "loss": 0.9014, + "step": 7243 + }, + { + "epoch": 4.6765655261459, + "grad_norm": 1.7405933161752611, + "learning_rate": 5.476923485219792e-05, + "loss": 0.9193, + "step": 7244 + }, + { + "epoch": 4.677211103938024, + "grad_norm": 1.6767744423420774, + "learning_rate": 5.4719327669519944e-05, + "loss": 0.9934, + "step": 7245 + }, + { + "epoch": 4.677856681730148, + "grad_norm": 1.7405754459721552, + "learning_rate": 5.466943816239601e-05, + "loss": 1.0339, + "step": 7246 + }, + { + "epoch": 4.678502259522272, + "grad_norm": 1.6592529776687517, + "learning_rate": 5.461956634008103e-05, + "loss": 0.8557, + "step": 7247 + }, + { + "epoch": 4.679147837314396, + "grad_norm": 1.6704686276071778, + "learning_rate": 5.4569712211826724e-05, + "loss": 1.0622, + "step": 7248 + }, + { + "epoch": 4.67979341510652, + "grad_norm": 1.6119744787791466, + "learning_rate": 5.451987578688168e-05, + "loss": 0.934, + "step": 7249 + }, + { + "epoch": 4.680438992898644, + "grad_norm": 1.6423482222716168, + "learning_rate": 5.447005707449091e-05, + "loss": 0.9304, + "step": 7250 + }, + { + "epoch": 4.681084570690768, + "grad_norm": 1.6264867273285835, + "learning_rate": 5.4420256083896436e-05, + "loss": 0.9509, + "step": 7251 + }, + { + "epoch": 4.681730148482892, + "grad_norm": 1.774484246221638, + "learning_rate": 5.4370472824336856e-05, + "loss": 1.0868, + "step": 7252 + }, + { + "epoch": 4.6823757262750165, + "grad_norm": 1.6821449817379677, + "learning_rate": 5.4320707305047324e-05, + "loss": 0.9674, + "step": 7253 + }, + { + "epoch": 4.6830213040671405, + "grad_norm": 1.745694832674836, + "learning_rate": 5.4270959535260025e-05, + "loss": 1.0582, + "step": 7254 + }, + { + "epoch": 4.683666881859264, + "grad_norm": 1.6633269360894083, + "learning_rate": 5.422122952420369e-05, + "loss": 1.0299, + "step": 7255 + }, + { + "epoch": 4.684312459651388, + "grad_norm": 1.7725192565288999, + "learning_rate": 5.4171517281103585e-05, + "loss": 1.0744, + "step": 7256 + }, + { + "epoch": 4.684958037443512, + "grad_norm": 
1.9075776147745647, + "learning_rate": 5.412182281518201e-05, + "loss": 1.1198, + "step": 7257 + }, + { + "epoch": 4.685603615235636, + "grad_norm": 1.898183967220348, + "learning_rate": 5.4072146135657754e-05, + "loss": 1.153, + "step": 7258 + }, + { + "epoch": 4.68624919302776, + "grad_norm": 1.8632523339880918, + "learning_rate": 5.4022487251746355e-05, + "loss": 1.2494, + "step": 7259 + }, + { + "epoch": 4.686894770819884, + "grad_norm": 1.8210405193820467, + "learning_rate": 5.397284617266005e-05, + "loss": 1.0463, + "step": 7260 + }, + { + "epoch": 4.687540348612008, + "grad_norm": 1.8408612453598614, + "learning_rate": 5.392322290760776e-05, + "loss": 0.9965, + "step": 7261 + }, + { + "epoch": 4.688185926404132, + "grad_norm": 1.564990634693941, + "learning_rate": 5.387361746579515e-05, + "loss": 0.9208, + "step": 7262 + }, + { + "epoch": 4.688831504196256, + "grad_norm": 1.8718017572128893, + "learning_rate": 5.382402985642452e-05, + "loss": 1.0242, + "step": 7263 + }, + { + "epoch": 4.68947708198838, + "grad_norm": 1.9655478549796737, + "learning_rate": 5.3774460088694835e-05, + "loss": 1.11, + "step": 7264 + }, + { + "epoch": 4.690122659780504, + "grad_norm": 1.8400812997776022, + "learning_rate": 5.372490817180198e-05, + "loss": 0.9757, + "step": 7265 + }, + { + "epoch": 4.690768237572628, + "grad_norm": 1.803247538602158, + "learning_rate": 5.3675374114938156e-05, + "loss": 0.8744, + "step": 7266 + }, + { + "epoch": 4.6914138153647515, + "grad_norm": 1.8149472665341604, + "learning_rate": 5.362585792729248e-05, + "loss": 0.9035, + "step": 7267 + }, + { + "epoch": 4.6920593931568755, + "grad_norm": 1.8698871728032527, + "learning_rate": 5.3576359618050836e-05, + "loss": 0.9445, + "step": 7268 + }, + { + "epoch": 4.692704970948999, + "grad_norm": 1.6167344519735594, + "learning_rate": 5.352687919639557e-05, + "loss": 0.9432, + "step": 7269 + }, + { + "epoch": 4.693350548741123, + "grad_norm": 1.7020526770257063, + "learning_rate": 5.347741667150576e-05, + "loss": 0.9797, + "step": 7270 + }, + { + "epoch": 4.693996126533247, + "grad_norm": 1.7283096087697323, + "learning_rate": 5.342797205255743e-05, + "loss": 1.0561, + "step": 7271 + }, + { + "epoch": 4.694641704325371, + "grad_norm": 1.9132994836614676, + "learning_rate": 5.337854534872286e-05, + "loss": 1.0678, + "step": 7272 + }, + { + "epoch": 4.695287282117495, + "grad_norm": 1.7756613357996285, + "learning_rate": 5.332913656917125e-05, + "loss": 0.983, + "step": 7273 + }, + { + "epoch": 4.695932859909619, + "grad_norm": 1.857812358843459, + "learning_rate": 5.32797457230686e-05, + "loss": 1.0728, + "step": 7274 + }, + { + "epoch": 4.696578437701743, + "grad_norm": 1.7784848687646404, + "learning_rate": 5.323037281957719e-05, + "loss": 1.0466, + "step": 7275 + }, + { + "epoch": 4.697224015493867, + "grad_norm": 1.7554385898338174, + "learning_rate": 5.318101786785638e-05, + "loss": 1.0838, + "step": 7276 + }, + { + "epoch": 4.697869593285991, + "grad_norm": 1.6724471343260179, + "learning_rate": 5.313168087706203e-05, + "loss": 0.8636, + "step": 7277 + }, + { + "epoch": 4.698515171078115, + "grad_norm": 1.8420248434281812, + "learning_rate": 5.308236185634649e-05, + "loss": 1.2008, + "step": 7278 + }, + { + "epoch": 4.699160748870239, + "grad_norm": 2.062441265111235, + "learning_rate": 5.3033060814859135e-05, + "loss": 1.1719, + "step": 7279 + }, + { + "epoch": 4.699806326662363, + "grad_norm": 1.6909565481846087, + "learning_rate": 5.29837777617458e-05, + "loss": 0.9637, + "step": 7280 + }, + { + "epoch": 
4.700451904454487, + "grad_norm": 1.7659403528581332, + "learning_rate": 5.293451270614884e-05, + "loss": 0.9493, + "step": 7281 + }, + { + "epoch": 4.7010974822466105, + "grad_norm": 1.940517359767267, + "learning_rate": 5.2885265657207616e-05, + "loss": 1.0865, + "step": 7282 + }, + { + "epoch": 4.701743060038734, + "grad_norm": 1.5657145472709697, + "learning_rate": 5.283603662405788e-05, + "loss": 0.9639, + "step": 7283 + }, + { + "epoch": 4.702388637830858, + "grad_norm": 2.065393487610819, + "learning_rate": 5.278682561583217e-05, + "loss": 1.1816, + "step": 7284 + }, + { + "epoch": 4.703034215622982, + "grad_norm": 1.6074427199026384, + "learning_rate": 5.2737632641659586e-05, + "loss": 0.9598, + "step": 7285 + }, + { + "epoch": 4.703679793415106, + "grad_norm": 1.5661499636892462, + "learning_rate": 5.2688457710665994e-05, + "loss": 0.9405, + "step": 7286 + }, + { + "epoch": 4.70432537120723, + "grad_norm": 1.7707393397821718, + "learning_rate": 5.2639300831973815e-05, + "loss": 0.9821, + "step": 7287 + }, + { + "epoch": 4.704970948999354, + "grad_norm": 1.6235344485395604, + "learning_rate": 5.259016201470217e-05, + "loss": 1.0389, + "step": 7288 + }, + { + "epoch": 4.705616526791478, + "grad_norm": 1.7552833675583959, + "learning_rate": 5.254104126796679e-05, + "loss": 0.9851, + "step": 7289 + }, + { + "epoch": 4.706262104583602, + "grad_norm": 1.5681730442354158, + "learning_rate": 5.24919386008802e-05, + "loss": 0.9203, + "step": 7290 + }, + { + "epoch": 4.706907682375727, + "grad_norm": 1.8863720056356565, + "learning_rate": 5.2442854022551324e-05, + "loss": 1.0419, + "step": 7291 + }, + { + "epoch": 4.707553260167851, + "grad_norm": 1.5645159888572937, + "learning_rate": 5.239378754208585e-05, + "loss": 0.9757, + "step": 7292 + }, + { + "epoch": 4.708198837959975, + "grad_norm": 2.400212223854826, + "learning_rate": 5.234473916858629e-05, + "loss": 1.0637, + "step": 7293 + }, + { + "epoch": 4.7088444157520986, + "grad_norm": 2.5559711278258406, + "learning_rate": 5.229570891115145e-05, + "loss": 1.1464, + "step": 7294 + }, + { + "epoch": 4.7094899935442225, + "grad_norm": 1.5608155974931863, + "learning_rate": 5.2246696778876975e-05, + "loss": 0.8547, + "step": 7295 + }, + { + "epoch": 4.710135571336346, + "grad_norm": 1.8355779308320106, + "learning_rate": 5.219770278085525e-05, + "loss": 1.064, + "step": 7296 + }, + { + "epoch": 4.71078114912847, + "grad_norm": 1.6317840137737856, + "learning_rate": 5.214872692617505e-05, + "loss": 0.9108, + "step": 7297 + }, + { + "epoch": 4.711426726920594, + "grad_norm": 1.8236033622007077, + "learning_rate": 5.2099769223921875e-05, + "loss": 0.9737, + "step": 7298 + }, + { + "epoch": 4.712072304712718, + "grad_norm": 1.9663940736588086, + "learning_rate": 5.205082968317807e-05, + "loss": 1.0036, + "step": 7299 + }, + { + "epoch": 4.712717882504842, + "grad_norm": 1.6142324668292591, + "learning_rate": 5.20019083130222e-05, + "loss": 0.9175, + "step": 7300 + }, + { + "epoch": 4.713363460296966, + "grad_norm": 1.8807833218734658, + "learning_rate": 5.195300512252984e-05, + "loss": 1.0685, + "step": 7301 + }, + { + "epoch": 4.71400903808909, + "grad_norm": 1.6694828976165001, + "learning_rate": 5.1904120120773046e-05, + "loss": 0.9196, + "step": 7302 + }, + { + "epoch": 4.714654615881214, + "grad_norm": 1.6583201786994803, + "learning_rate": 5.185525331682035e-05, + "loss": 0.9007, + "step": 7303 + }, + { + "epoch": 4.715300193673338, + "grad_norm": 1.7070907930661716, + "learning_rate": 5.180640471973718e-05, + "loss": 0.9944, + "step": 
7304 + }, + { + "epoch": 4.715945771465462, + "grad_norm": 1.839337781550116, + "learning_rate": 5.175757433858549e-05, + "loss": 1.0856, + "step": 7305 + }, + { + "epoch": 4.716591349257586, + "grad_norm": 1.612178495739189, + "learning_rate": 5.1708762182423654e-05, + "loss": 0.9401, + "step": 7306 + }, + { + "epoch": 4.71723692704971, + "grad_norm": 1.9069771856406783, + "learning_rate": 5.1659968260306976e-05, + "loss": 0.9547, + "step": 7307 + }, + { + "epoch": 4.717882504841834, + "grad_norm": 1.5649237431417429, + "learning_rate": 5.16111925812872e-05, + "loss": 0.8941, + "step": 7308 + }, + { + "epoch": 4.7185280826339575, + "grad_norm": 1.9012001460195176, + "learning_rate": 5.156243515441271e-05, + "loss": 1.1407, + "step": 7309 + }, + { + "epoch": 4.719173660426081, + "grad_norm": 1.7871837443658096, + "learning_rate": 5.1513695988728536e-05, + "loss": 1.0551, + "step": 7310 + }, + { + "epoch": 4.719819238218205, + "grad_norm": 1.6547946258289805, + "learning_rate": 5.146497509327626e-05, + "loss": 0.8777, + "step": 7311 + }, + { + "epoch": 4.720464816010329, + "grad_norm": 1.8711612165296108, + "learning_rate": 5.1416272477094155e-05, + "loss": 1.007, + "step": 7312 + }, + { + "epoch": 4.721110393802453, + "grad_norm": 1.5801529396339566, + "learning_rate": 5.1367588149217024e-05, + "loss": 0.8809, + "step": 7313 + }, + { + "epoch": 4.721755971594577, + "grad_norm": 1.6832254809895695, + "learning_rate": 5.131892211867631e-05, + "loss": 0.9356, + "step": 7314 + }, + { + "epoch": 4.722401549386701, + "grad_norm": 1.7035166132267499, + "learning_rate": 5.1270274394500186e-05, + "loss": 0.8952, + "step": 7315 + }, + { + "epoch": 4.723047127178825, + "grad_norm": 1.8159730799037326, + "learning_rate": 5.1221644985713143e-05, + "loss": 1.0549, + "step": 7316 + }, + { + "epoch": 4.723692704970949, + "grad_norm": 1.779261995940128, + "learning_rate": 5.11730339013365e-05, + "loss": 0.9345, + "step": 7317 + }, + { + "epoch": 4.724338282763073, + "grad_norm": 1.695254298702834, + "learning_rate": 5.1124441150388206e-05, + "loss": 0.8695, + "step": 7318 + }, + { + "epoch": 4.724983860555197, + "grad_norm": 1.8251964514207983, + "learning_rate": 5.10758667418826e-05, + "loss": 1.1072, + "step": 7319 + }, + { + "epoch": 4.725629438347321, + "grad_norm": 2.3291034486364515, + "learning_rate": 5.1027310684830745e-05, + "loss": 1.1119, + "step": 7320 + }, + { + "epoch": 4.726275016139445, + "grad_norm": 1.7585941180338605, + "learning_rate": 5.097877298824043e-05, + "loss": 0.9078, + "step": 7321 + }, + { + "epoch": 4.726920593931569, + "grad_norm": 1.6517300710782665, + "learning_rate": 5.093025366111576e-05, + "loss": 1.0547, + "step": 7322 + }, + { + "epoch": 4.7275661717236925, + "grad_norm": 1.8805190549903252, + "learning_rate": 5.088175271245757e-05, + "loss": 1.088, + "step": 7323 + }, + { + "epoch": 4.7282117495158165, + "grad_norm": 1.7292772277123052, + "learning_rate": 5.083327015126344e-05, + "loss": 0.9919, + "step": 7324 + }, + { + "epoch": 4.72885732730794, + "grad_norm": 1.7831383244365187, + "learning_rate": 5.078480598652718e-05, + "loss": 1.0844, + "step": 7325 + }, + { + "epoch": 4.729502905100064, + "grad_norm": 1.8767084746470097, + "learning_rate": 5.073636022723956e-05, + "loss": 1.0429, + "step": 7326 + }, + { + "epoch": 4.730148482892188, + "grad_norm": 1.9755037498627621, + "learning_rate": 5.068793288238778e-05, + "loss": 0.9693, + "step": 7327 + }, + { + "epoch": 4.730794060684312, + "grad_norm": 1.8424547122661095, + "learning_rate": 5.063952396095543e-05, + 
"loss": 0.8693, + "step": 7328 + }, + { + "epoch": 4.731439638476436, + "grad_norm": 1.9538598880754345, + "learning_rate": 5.059113347192304e-05, + "loss": 1.0045, + "step": 7329 + }, + { + "epoch": 4.73208521626856, + "grad_norm": 1.608058918680016, + "learning_rate": 5.054276142426756e-05, + "loss": 0.9991, + "step": 7330 + }, + { + "epoch": 4.732730794060684, + "grad_norm": 1.7527928232164396, + "learning_rate": 5.049440782696233e-05, + "loss": 1.0456, + "step": 7331 + }, + { + "epoch": 4.733376371852808, + "grad_norm": 1.9178586407146652, + "learning_rate": 5.044607268897761e-05, + "loss": 1.0298, + "step": 7332 + }, + { + "epoch": 4.734021949644932, + "grad_norm": 1.735239651800332, + "learning_rate": 5.0397756019280035e-05, + "loss": 0.9835, + "step": 7333 + }, + { + "epoch": 4.734667527437056, + "grad_norm": 1.723343552692624, + "learning_rate": 5.03494578268328e-05, + "loss": 1.0035, + "step": 7334 + }, + { + "epoch": 4.73531310522918, + "grad_norm": 1.8993742863122034, + "learning_rate": 5.030117812059578e-05, + "loss": 0.9887, + "step": 7335 + }, + { + "epoch": 4.735958683021304, + "grad_norm": 1.8417626972317591, + "learning_rate": 5.025291690952533e-05, + "loss": 1.1052, + "step": 7336 + }, + { + "epoch": 4.736604260813428, + "grad_norm": 1.78540753468366, + "learning_rate": 5.020467420257441e-05, + "loss": 1.08, + "step": 7337 + }, + { + "epoch": 4.7372498386055515, + "grad_norm": 1.7476255521578, + "learning_rate": 5.015645000869253e-05, + "loss": 1.0491, + "step": 7338 + }, + { + "epoch": 4.737895416397676, + "grad_norm": 1.8292874496015505, + "learning_rate": 5.010824433682575e-05, + "loss": 1.0443, + "step": 7339 + }, + { + "epoch": 4.7385409941898, + "grad_norm": 1.9125907350593359, + "learning_rate": 5.0060057195916865e-05, + "loss": 1.2129, + "step": 7340 + }, + { + "epoch": 4.739186571981924, + "grad_norm": 1.99111606309822, + "learning_rate": 5.0011888594904946e-05, + "loss": 1.0079, + "step": 7341 + }, + { + "epoch": 4.739832149774048, + "grad_norm": 1.76875547705138, + "learning_rate": 4.996373854272575e-05, + "loss": 0.9189, + "step": 7342 + }, + { + "epoch": 4.740477727566172, + "grad_norm": 1.8551308829947266, + "learning_rate": 4.991560704831178e-05, + "loss": 0.8779, + "step": 7343 + }, + { + "epoch": 4.741123305358296, + "grad_norm": 1.5365088092276469, + "learning_rate": 4.986749412059177e-05, + "loss": 0.8667, + "step": 7344 + }, + { + "epoch": 4.74176888315042, + "grad_norm": 1.695030039392098, + "learning_rate": 4.981939976849118e-05, + "loss": 1.02, + "step": 7345 + }, + { + "epoch": 4.742414460942544, + "grad_norm": 1.4555272003801756, + "learning_rate": 4.977132400093213e-05, + "loss": 0.8686, + "step": 7346 + }, + { + "epoch": 4.743060038734668, + "grad_norm": 1.5980229190580901, + "learning_rate": 4.972326682683301e-05, + "loss": 0.8782, + "step": 7347 + }, + { + "epoch": 4.743705616526792, + "grad_norm": 1.9164305004371067, + "learning_rate": 4.967522825510905e-05, + "loss": 1.0894, + "step": 7348 + }, + { + "epoch": 4.744351194318916, + "grad_norm": 1.6132004389446495, + "learning_rate": 4.9627208294671914e-05, + "loss": 0.9205, + "step": 7349 + }, + { + "epoch": 4.7449967721110395, + "grad_norm": 1.962983160402659, + "learning_rate": 4.957920695442967e-05, + "loss": 0.9974, + "step": 7350 + }, + { + "epoch": 4.7456423499031635, + "grad_norm": 1.6563878025428647, + "learning_rate": 4.9531224243287185e-05, + "loss": 0.9432, + "step": 7351 + }, + { + "epoch": 4.746287927695287, + "grad_norm": 1.8049433336677365, + "learning_rate": 
4.9483260170145756e-05, + "loss": 1.1087, + "step": 7352 + }, + { + "epoch": 4.746933505487411, + "grad_norm": 1.7292094335550279, + "learning_rate": 4.943531474390309e-05, + "loss": 0.993, + "step": 7353 + }, + { + "epoch": 4.747579083279535, + "grad_norm": 1.7586606686569346, + "learning_rate": 4.938738797345368e-05, + "loss": 0.9327, + "step": 7354 + }, + { + "epoch": 4.748224661071659, + "grad_norm": 1.683820560689177, + "learning_rate": 4.933947986768847e-05, + "loss": 1.0234, + "step": 7355 + }, + { + "epoch": 4.748870238863783, + "grad_norm": 1.7969508212525866, + "learning_rate": 4.929159043549473e-05, + "loss": 0.9287, + "step": 7356 + }, + { + "epoch": 4.749515816655907, + "grad_norm": 1.6805694189455824, + "learning_rate": 4.924371968575661e-05, + "loss": 0.9915, + "step": 7357 + }, + { + "epoch": 4.750161394448031, + "grad_norm": 1.9559166135603316, + "learning_rate": 4.9195867627354604e-05, + "loss": 0.9974, + "step": 7358 + }, + { + "epoch": 4.750806972240155, + "grad_norm": 1.5545512156385697, + "learning_rate": 4.914803426916572e-05, + "loss": 0.8944, + "step": 7359 + }, + { + "epoch": 4.751452550032279, + "grad_norm": 1.6803600003600454, + "learning_rate": 4.9100219620063584e-05, + "loss": 0.9607, + "step": 7360 + }, + { + "epoch": 4.752098127824403, + "grad_norm": 1.8626110281091575, + "learning_rate": 4.9052423688918284e-05, + "loss": 1.0193, + "step": 7361 + }, + { + "epoch": 4.752743705616527, + "grad_norm": 1.7902420021974244, + "learning_rate": 4.9004646484596474e-05, + "loss": 0.983, + "step": 7362 + }, + { + "epoch": 4.753389283408651, + "grad_norm": 1.893463530373879, + "learning_rate": 4.895688801596132e-05, + "loss": 1.1576, + "step": 7363 + }, + { + "epoch": 4.754034861200775, + "grad_norm": 1.8362835436136804, + "learning_rate": 4.890914829187246e-05, + "loss": 0.9669, + "step": 7364 + }, + { + "epoch": 4.7546804389928985, + "grad_norm": 1.766944868265846, + "learning_rate": 4.886142732118627e-05, + "loss": 1.0024, + "step": 7365 + }, + { + "epoch": 4.755326016785022, + "grad_norm": 1.8974250175414726, + "learning_rate": 4.881372511275531e-05, + "loss": 0.9718, + "step": 7366 + }, + { + "epoch": 4.755971594577146, + "grad_norm": 2.043389579468726, + "learning_rate": 4.876604167542887e-05, + "loss": 1.1512, + "step": 7367 + }, + { + "epoch": 4.75661717236927, + "grad_norm": 1.6891768003278544, + "learning_rate": 4.871837701805284e-05, + "loss": 1.0237, + "step": 7368 + }, + { + "epoch": 4.757262750161394, + "grad_norm": 1.6666655776869759, + "learning_rate": 4.8670731149469365e-05, + "loss": 0.964, + "step": 7369 + }, + { + "epoch": 4.757908327953518, + "grad_norm": 1.8336798212535248, + "learning_rate": 4.862310407851728e-05, + "loss": 0.9726, + "step": 7370 + }, + { + "epoch": 4.758553905745642, + "grad_norm": 1.8753609459933978, + "learning_rate": 4.8575495814032004e-05, + "loss": 0.9129, + "step": 7371 + }, + { + "epoch": 4.759199483537766, + "grad_norm": 1.7041550238205534, + "learning_rate": 4.852790636484519e-05, + "loss": 1.0505, + "step": 7372 + }, + { + "epoch": 4.75984506132989, + "grad_norm": 2.114148633154453, + "learning_rate": 4.848033573978533e-05, + "loss": 1.2081, + "step": 7373 + }, + { + "epoch": 4.760490639122014, + "grad_norm": 1.6369641660973795, + "learning_rate": 4.843278394767726e-05, + "loss": 1.0306, + "step": 7374 + }, + { + "epoch": 4.761136216914138, + "grad_norm": 1.6514299479470957, + "learning_rate": 4.838525099734217e-05, + "loss": 0.984, + "step": 7375 + }, + { + "epoch": 4.761781794706262, + "grad_norm": 1.6721059691705116, 
+ "learning_rate": 4.8337736897598105e-05, + "loss": 0.9786, + "step": 7376 + }, + { + "epoch": 4.7624273724983865, + "grad_norm": 1.7330019159458017, + "learning_rate": 4.8290241657259375e-05, + "loss": 0.9858, + "step": 7377 + }, + { + "epoch": 4.7630729502905105, + "grad_norm": 1.5576871784150674, + "learning_rate": 4.824276528513672e-05, + "loss": 0.9144, + "step": 7378 + }, + { + "epoch": 4.763718528082634, + "grad_norm": 1.6180017773159983, + "learning_rate": 4.819530779003764e-05, + "loss": 0.8848, + "step": 7379 + }, + { + "epoch": 4.764364105874758, + "grad_norm": 1.847546017405836, + "learning_rate": 4.814786918076598e-05, + "loss": 1.0897, + "step": 7380 + }, + { + "epoch": 4.765009683666882, + "grad_norm": 1.8732008984982045, + "learning_rate": 4.810044946612198e-05, + "loss": 1.0776, + "step": 7381 + }, + { + "epoch": 4.765655261459006, + "grad_norm": 1.7398902248260941, + "learning_rate": 4.805304865490261e-05, + "loss": 0.9234, + "step": 7382 + }, + { + "epoch": 4.76630083925113, + "grad_norm": 1.6780245411319257, + "learning_rate": 4.800566675590118e-05, + "loss": 0.9872, + "step": 7383 + }, + { + "epoch": 4.766946417043254, + "grad_norm": 1.8487475877572477, + "learning_rate": 4.795830377790751e-05, + "loss": 1.0117, + "step": 7384 + }, + { + "epoch": 4.767591994835378, + "grad_norm": 1.6205763533004482, + "learning_rate": 4.7910959729707924e-05, + "loss": 0.9132, + "step": 7385 + }, + { + "epoch": 4.768237572627502, + "grad_norm": 1.9914747574704323, + "learning_rate": 4.7863634620085255e-05, + "loss": 1.153, + "step": 7386 + }, + { + "epoch": 4.768883150419626, + "grad_norm": 1.9139040416647959, + "learning_rate": 4.7816328457818775e-05, + "loss": 1.0242, + "step": 7387 + }, + { + "epoch": 4.76952872821175, + "grad_norm": 1.758201567996156, + "learning_rate": 4.7769041251684275e-05, + "loss": 0.9642, + "step": 7388 + }, + { + "epoch": 4.770174306003874, + "grad_norm": 1.7392448719647322, + "learning_rate": 4.772177301045399e-05, + "loss": 0.9652, + "step": 7389 + }, + { + "epoch": 4.770819883795998, + "grad_norm": 1.960800379641239, + "learning_rate": 4.76745237428968e-05, + "loss": 1.2387, + "step": 7390 + }, + { + "epoch": 4.771465461588122, + "grad_norm": 1.767503668433727, + "learning_rate": 4.76272934577778e-05, + "loss": 0.9227, + "step": 7391 + }, + { + "epoch": 4.7721110393802455, + "grad_norm": 1.763290703359507, + "learning_rate": 4.75800821638587e-05, + "loss": 1.0207, + "step": 7392 + }, + { + "epoch": 4.772756617172369, + "grad_norm": 1.855688840071759, + "learning_rate": 4.7532889869897806e-05, + "loss": 1.0143, + "step": 7393 + }, + { + "epoch": 4.773402194964493, + "grad_norm": 2.082019300281402, + "learning_rate": 4.748571658464966e-05, + "loss": 1.1582, + "step": 7394 + }, + { + "epoch": 4.774047772756617, + "grad_norm": 1.742293102942273, + "learning_rate": 4.743856231686539e-05, + "loss": 1.0044, + "step": 7395 + }, + { + "epoch": 4.774693350548741, + "grad_norm": 1.7697727607123788, + "learning_rate": 4.739142707529273e-05, + "loss": 0.9183, + "step": 7396 + }, + { + "epoch": 4.775338928340865, + "grad_norm": 1.5775207550203458, + "learning_rate": 4.734431086867559e-05, + "loss": 0.8874, + "step": 7397 + }, + { + "epoch": 4.775984506132989, + "grad_norm": 1.6910782751911047, + "learning_rate": 4.7297213705754635e-05, + "loss": 0.9399, + "step": 7398 + }, + { + "epoch": 4.776630083925113, + "grad_norm": 2.060954953423787, + "learning_rate": 4.7250135595266904e-05, + "loss": 1.1611, + "step": 7399 + }, + { + "epoch": 4.777275661717237, + "grad_norm": 
1.8521453508757635, + "learning_rate": 4.720307654594571e-05, + "loss": 0.9442, + "step": 7400 + }, + { + "epoch": 4.777921239509361, + "grad_norm": 1.4996710671545135, + "learning_rate": 4.715603656652115e-05, + "loss": 0.8582, + "step": 7401 + }, + { + "epoch": 4.778566817301485, + "grad_norm": 1.7866666587884783, + "learning_rate": 4.7109015665719616e-05, + "loss": 0.9967, + "step": 7402 + }, + { + "epoch": 4.779212395093609, + "grad_norm": 1.8459506288568717, + "learning_rate": 4.706201385226384e-05, + "loss": 1.0419, + "step": 7403 + }, + { + "epoch": 4.779857972885733, + "grad_norm": 1.6188722075772355, + "learning_rate": 4.701503113487331e-05, + "loss": 1.0004, + "step": 7404 + }, + { + "epoch": 4.780503550677857, + "grad_norm": 1.5632127964803177, + "learning_rate": 4.696806752226377e-05, + "loss": 0.8603, + "step": 7405 + }, + { + "epoch": 4.7811491284699805, + "grad_norm": 1.870898544891731, + "learning_rate": 4.692112302314732e-05, + "loss": 1.0716, + "step": 7406 + }, + { + "epoch": 4.7817947062621045, + "grad_norm": 1.8446643888422938, + "learning_rate": 4.687419764623282e-05, + "loss": 0.9541, + "step": 7407 + }, + { + "epoch": 4.782440284054228, + "grad_norm": 1.8459153053769408, + "learning_rate": 4.6827291400225345e-05, + "loss": 0.991, + "step": 7408 + }, + { + "epoch": 4.783085861846352, + "grad_norm": 1.6943020328992286, + "learning_rate": 4.678040429382651e-05, + "loss": 0.9646, + "step": 7409 + }, + { + "epoch": 4.783731439638476, + "grad_norm": 1.88863790390736, + "learning_rate": 4.673353633573434e-05, + "loss": 1.003, + "step": 7410 + }, + { + "epoch": 4.7843770174306, + "grad_norm": 1.7034754322336783, + "learning_rate": 4.668668753464334e-05, + "loss": 0.9937, + "step": 7411 + }, + { + "epoch": 4.785022595222724, + "grad_norm": 1.5381688316093889, + "learning_rate": 4.663985789924444e-05, + "loss": 0.8207, + "step": 7412 + }, + { + "epoch": 4.785668173014848, + "grad_norm": 1.9407185856269784, + "learning_rate": 4.6593047438225026e-05, + "loss": 1.0932, + "step": 7413 + }, + { + "epoch": 4.786313750806972, + "grad_norm": 1.886962647431995, + "learning_rate": 4.6546256160268905e-05, + "loss": 1.072, + "step": 7414 + }, + { + "epoch": 4.786959328599096, + "grad_norm": 1.8278439372403676, + "learning_rate": 4.649948407405644e-05, + "loss": 1.2275, + "step": 7415 + }, + { + "epoch": 4.78760490639122, + "grad_norm": 1.7124077897087253, + "learning_rate": 4.645273118826423e-05, + "loss": 0.9644, + "step": 7416 + }, + { + "epoch": 4.788250484183344, + "grad_norm": 1.5672097189447003, + "learning_rate": 4.6405997511565394e-05, + "loss": 0.9226, + "step": 7417 + }, + { + "epoch": 4.788896061975468, + "grad_norm": 1.7609852765844813, + "learning_rate": 4.635928305262968e-05, + "loss": 0.9357, + "step": 7418 + }, + { + "epoch": 4.789541639767592, + "grad_norm": 1.8772410738765386, + "learning_rate": 4.6312587820122946e-05, + "loss": 1.0719, + "step": 7419 + }, + { + "epoch": 4.7901872175597155, + "grad_norm": 1.6608742852962184, + "learning_rate": 4.626591182270767e-05, + "loss": 0.8968, + "step": 7420 + }, + { + "epoch": 4.7908327953518395, + "grad_norm": 1.8444765754502486, + "learning_rate": 4.621925506904285e-05, + "loss": 1.0075, + "step": 7421 + }, + { + "epoch": 4.791478373143963, + "grad_norm": 1.8515097324532863, + "learning_rate": 4.61726175677836e-05, + "loss": 1.0538, + "step": 7422 + }, + { + "epoch": 4.792123950936087, + "grad_norm": 1.787705607255184, + "learning_rate": 4.612599932758183e-05, + "loss": 0.8846, + "step": 7423 + }, + { + "epoch": 
4.792769528728211, + "grad_norm": 1.842995558391237, + "learning_rate": 4.607940035708571e-05, + "loss": 0.9423, + "step": 7424 + }, + { + "epoch": 4.793415106520336, + "grad_norm": 1.744698633094068, + "learning_rate": 4.603282066493966e-05, + "loss": 0.8557, + "step": 7425 + }, + { + "epoch": 4.79406068431246, + "grad_norm": 1.5953898935026845, + "learning_rate": 4.598626025978486e-05, + "loss": 0.9548, + "step": 7426 + }, + { + "epoch": 4.794706262104584, + "grad_norm": 1.7617086994523083, + "learning_rate": 4.5939719150258754e-05, + "loss": 0.9475, + "step": 7427 + }, + { + "epoch": 4.795351839896708, + "grad_norm": 1.9105616334404307, + "learning_rate": 4.589319734499504e-05, + "loss": 0.824, + "step": 7428 + }, + { + "epoch": 4.795997417688832, + "grad_norm": 1.8179755805219942, + "learning_rate": 4.584669485262414e-05, + "loss": 0.9793, + "step": 7429 + }, + { + "epoch": 4.796642995480956, + "grad_norm": 1.6163376640977998, + "learning_rate": 4.5800211681772725e-05, + "loss": 0.8754, + "step": 7430 + }, + { + "epoch": 4.79728857327308, + "grad_norm": 1.578080031469166, + "learning_rate": 4.575374784106388e-05, + "loss": 0.902, + "step": 7431 + }, + { + "epoch": 4.797934151065204, + "grad_norm": 1.7499011534194955, + "learning_rate": 4.5707303339117145e-05, + "loss": 0.9775, + "step": 7432 + }, + { + "epoch": 4.7985797288573275, + "grad_norm": 1.7043754908205677, + "learning_rate": 4.5660878184548443e-05, + "loss": 1.0083, + "step": 7433 + }, + { + "epoch": 4.7992253066494515, + "grad_norm": 1.7251421777209415, + "learning_rate": 4.561447238597015e-05, + "loss": 0.9509, + "step": 7434 + }, + { + "epoch": 4.799870884441575, + "grad_norm": 1.7896968153041581, + "learning_rate": 4.556808595199098e-05, + "loss": 1.0897, + "step": 7435 + }, + { + "epoch": 4.800516462233699, + "grad_norm": 1.737095649552832, + "learning_rate": 4.552171889121614e-05, + "loss": 0.9374, + "step": 7436 + }, + { + "epoch": 4.801162040025823, + "grad_norm": 1.8608733607549381, + "learning_rate": 4.5475371212247194e-05, + "loss": 1.0393, + "step": 7437 + }, + { + "epoch": 4.801807617817947, + "grad_norm": 2.327853334790235, + "learning_rate": 4.5429042923682093e-05, + "loss": 0.9964, + "step": 7438 + }, + { + "epoch": 4.802453195610071, + "grad_norm": 1.8479613190730362, + "learning_rate": 4.538273403411518e-05, + "loss": 1.0893, + "step": 7439 + }, + { + "epoch": 4.803098773402195, + "grad_norm": 1.8798451500403242, + "learning_rate": 4.533644455213737e-05, + "loss": 0.926, + "step": 7440 + }, + { + "epoch": 4.803744351194319, + "grad_norm": 1.6979706557102017, + "learning_rate": 4.5290174486335735e-05, + "loss": 0.9092, + "step": 7441 + }, + { + "epoch": 4.804389928986443, + "grad_norm": 1.6994312141509371, + "learning_rate": 4.524392384529381e-05, + "loss": 0.9925, + "step": 7442 + }, + { + "epoch": 4.805035506778567, + "grad_norm": 1.620760410420854, + "learning_rate": 4.519769263759174e-05, + "loss": 0.8849, + "step": 7443 + }, + { + "epoch": 4.805681084570691, + "grad_norm": 1.6906053545860864, + "learning_rate": 4.5151480871805735e-05, + "loss": 0.9855, + "step": 7444 + }, + { + "epoch": 4.806326662362815, + "grad_norm": 2.1551732642567405, + "learning_rate": 4.5105288556508565e-05, + "loss": 0.9304, + "step": 7445 + }, + { + "epoch": 4.806972240154939, + "grad_norm": 1.7580146849383502, + "learning_rate": 4.505911570026952e-05, + "loss": 0.9955, + "step": 7446 + }, + { + "epoch": 4.8076178179470626, + "grad_norm": 1.8277348149671924, + "learning_rate": 4.501296231165396e-05, + "loss": 1.0422, + "step": 
7447 + }, + { + "epoch": 4.8082633957391865, + "grad_norm": 1.862802485427412, + "learning_rate": 4.496682839922396e-05, + "loss": 1.0007, + "step": 7448 + }, + { + "epoch": 4.80890897353131, + "grad_norm": 1.7165052281399638, + "learning_rate": 4.492071397153785e-05, + "loss": 0.9716, + "step": 7449 + }, + { + "epoch": 4.809554551323434, + "grad_norm": 1.7664791282346395, + "learning_rate": 4.4874619037150165e-05, + "loss": 0.9779, + "step": 7450 + }, + { + "epoch": 4.810200129115558, + "grad_norm": 1.8995431480034903, + "learning_rate": 4.482854360461217e-05, + "loss": 1.114, + "step": 7451 + }, + { + "epoch": 4.810845706907682, + "grad_norm": 1.8908686239270602, + "learning_rate": 4.4782487682471296e-05, + "loss": 1.1015, + "step": 7452 + }, + { + "epoch": 4.811491284699806, + "grad_norm": 1.7049855696318295, + "learning_rate": 4.473645127927129e-05, + "loss": 0.9609, + "step": 7453 + }, + { + "epoch": 4.81213686249193, + "grad_norm": 1.7025068000354213, + "learning_rate": 4.469043440355252e-05, + "loss": 0.9126, + "step": 7454 + }, + { + "epoch": 4.812782440284054, + "grad_norm": 1.5587937143940198, + "learning_rate": 4.4644437063851516e-05, + "loss": 0.8823, + "step": 7455 + }, + { + "epoch": 4.813428018076178, + "grad_norm": 2.0814122703143902, + "learning_rate": 4.4598459268701305e-05, + "loss": 1.2109, + "step": 7456 + }, + { + "epoch": 4.814073595868302, + "grad_norm": 1.814903098253453, + "learning_rate": 4.45525010266312e-05, + "loss": 0.8921, + "step": 7457 + }, + { + "epoch": 4.814719173660426, + "grad_norm": 1.9008778846951213, + "learning_rate": 4.450656234616698e-05, + "loss": 1.0678, + "step": 7458 + }, + { + "epoch": 4.81536475145255, + "grad_norm": 1.722054970494762, + "learning_rate": 4.446064323583069e-05, + "loss": 1.0431, + "step": 7459 + }, + { + "epoch": 4.816010329244674, + "grad_norm": 1.7066794334862807, + "learning_rate": 4.4414743704140865e-05, + "loss": 0.9089, + "step": 7460 + }, + { + "epoch": 4.816655907036798, + "grad_norm": 1.6477299139326096, + "learning_rate": 4.436886375961231e-05, + "loss": 0.956, + "step": 7461 + }, + { + "epoch": 4.8173014848289215, + "grad_norm": 1.6525279255088834, + "learning_rate": 4.4323003410756224e-05, + "loss": 0.9924, + "step": 7462 + }, + { + "epoch": 4.817947062621046, + "grad_norm": 1.7444084343524917, + "learning_rate": 4.42771626660802e-05, + "loss": 0.9465, + "step": 7463 + }, + { + "epoch": 4.81859264041317, + "grad_norm": 1.8103698062532294, + "learning_rate": 4.4231341534088104e-05, + "loss": 0.87, + "step": 7464 + }, + { + "epoch": 4.819238218205294, + "grad_norm": 1.8634961080010868, + "learning_rate": 4.4185540023280384e-05, + "loss": 1.0081, + "step": 7465 + }, + { + "epoch": 4.819883795997418, + "grad_norm": 1.638810840690753, + "learning_rate": 4.413975814215356e-05, + "loss": 0.9235, + "step": 7466 + }, + { + "epoch": 4.820529373789542, + "grad_norm": 1.6300502161742338, + "learning_rate": 4.4093995899200644e-05, + "loss": 0.9433, + "step": 7467 + }, + { + "epoch": 4.821174951581666, + "grad_norm": 1.438662601015558, + "learning_rate": 4.404825330291114e-05, + "loss": 0.9334, + "step": 7468 + }, + { + "epoch": 4.82182052937379, + "grad_norm": 1.7082690907105345, + "learning_rate": 4.4002530361770645e-05, + "loss": 0.9948, + "step": 7469 + }, + { + "epoch": 4.822466107165914, + "grad_norm": 1.620698834413578, + "learning_rate": 4.395682708426122e-05, + "loss": 0.953, + "step": 7470 + }, + { + "epoch": 4.823111684958038, + "grad_norm": 1.6952761197901622, + "learning_rate": 4.3911143478861455e-05, + "loss": 
0.9989, + "step": 7471 + }, + { + "epoch": 4.823757262750162, + "grad_norm": 2.6252398477645205, + "learning_rate": 4.3865479554045936e-05, + "loss": 1.0133, + "step": 7472 + }, + { + "epoch": 4.824402840542286, + "grad_norm": 1.573930048185765, + "learning_rate": 4.381983531828593e-05, + "loss": 0.8657, + "step": 7473 + }, + { + "epoch": 4.82504841833441, + "grad_norm": 1.736534330182427, + "learning_rate": 4.3774210780048944e-05, + "loss": 0.8903, + "step": 7474 + }, + { + "epoch": 4.8256939961265335, + "grad_norm": 1.8560455728360483, + "learning_rate": 4.372860594779865e-05, + "loss": 1.109, + "step": 7475 + }, + { + "epoch": 4.826339573918657, + "grad_norm": 1.8266494539489138, + "learning_rate": 4.3683020829995343e-05, + "loss": 1.0276, + "step": 7476 + }, + { + "epoch": 4.826985151710781, + "grad_norm": 1.5680442525288374, + "learning_rate": 4.363745543509557e-05, + "loss": 0.9001, + "step": 7477 + }, + { + "epoch": 4.827630729502905, + "grad_norm": 1.7235685309439162, + "learning_rate": 4.3591909771551995e-05, + "loss": 0.8806, + "step": 7478 + }, + { + "epoch": 4.828276307295029, + "grad_norm": 1.7739590861297267, + "learning_rate": 4.354638384781399e-05, + "loss": 0.9856, + "step": 7479 + }, + { + "epoch": 4.828921885087153, + "grad_norm": 1.8931404038924209, + "learning_rate": 4.3500877672327034e-05, + "loss": 1.0804, + "step": 7480 + }, + { + "epoch": 4.829567462879277, + "grad_norm": 1.8640210679566387, + "learning_rate": 4.345539125353298e-05, + "loss": 1.0417, + "step": 7481 + }, + { + "epoch": 4.830213040671401, + "grad_norm": 1.5959494369903626, + "learning_rate": 4.340992459987004e-05, + "loss": 0.853, + "step": 7482 + }, + { + "epoch": 4.830858618463525, + "grad_norm": 1.805514221578789, + "learning_rate": 4.336447771977275e-05, + "loss": 0.9826, + "step": 7483 + }, + { + "epoch": 4.831504196255649, + "grad_norm": 1.7858761281576194, + "learning_rate": 4.3319050621671985e-05, + "loss": 0.8934, + "step": 7484 + }, + { + "epoch": 4.832149774047773, + "grad_norm": 1.7108642185650833, + "learning_rate": 4.327364331399494e-05, + "loss": 1.0897, + "step": 7485 + }, + { + "epoch": 4.832795351839897, + "grad_norm": 2.193171267028724, + "learning_rate": 4.3228255805165136e-05, + "loss": 1.0061, + "step": 7486 + }, + { + "epoch": 4.833440929632021, + "grad_norm": 1.7292072441837851, + "learning_rate": 4.3182888103602426e-05, + "loss": 1.0444, + "step": 7487 + }, + { + "epoch": 4.834086507424145, + "grad_norm": 2.491731228600536, + "learning_rate": 4.3137540217722996e-05, + "loss": 0.9635, + "step": 7488 + }, + { + "epoch": 4.8347320852162685, + "grad_norm": 1.6987472488201858, + "learning_rate": 4.3092212155939294e-05, + "loss": 1.0133, + "step": 7489 + }, + { + "epoch": 4.8353776630083924, + "grad_norm": 1.854708415698269, + "learning_rate": 4.3046903926660296e-05, + "loss": 0.9974, + "step": 7490 + }, + { + "epoch": 4.836023240800516, + "grad_norm": 1.7608036509931797, + "learning_rate": 4.3001615538291016e-05, + "loss": 0.9705, + "step": 7491 + }, + { + "epoch": 4.83666881859264, + "grad_norm": 1.79608508367981, + "learning_rate": 4.2956346999232896e-05, + "loss": 1.0098, + "step": 7492 + }, + { + "epoch": 4.837314396384764, + "grad_norm": 2.0204639965069933, + "learning_rate": 4.291109831788387e-05, + "loss": 1.125, + "step": 7493 + }, + { + "epoch": 4.837959974176888, + "grad_norm": 1.9912977811505939, + "learning_rate": 4.286586950263793e-05, + "loss": 1.0221, + "step": 7494 + }, + { + "epoch": 4.838605551969012, + "grad_norm": 2.0627855491250635, + "learning_rate": 
4.282066056188544e-05, + "loss": 0.9557, + "step": 7495 + }, + { + "epoch": 4.839251129761136, + "grad_norm": 1.7656678457640762, + "learning_rate": 4.27754715040133e-05, + "loss": 0.8699, + "step": 7496 + }, + { + "epoch": 4.83989670755326, + "grad_norm": 1.6141551633416056, + "learning_rate": 4.273030233740435e-05, + "loss": 0.912, + "step": 7497 + }, + { + "epoch": 4.840542285345384, + "grad_norm": 1.945837994931662, + "learning_rate": 4.268515307043808e-05, + "loss": 0.9687, + "step": 7498 + }, + { + "epoch": 4.841187863137508, + "grad_norm": 1.6295422048642714, + "learning_rate": 4.2640023711490154e-05, + "loss": 0.9651, + "step": 7499 + }, + { + "epoch": 4.841833440929632, + "grad_norm": 1.8472683403659589, + "learning_rate": 4.2594914268932404e-05, + "loss": 0.8943, + "step": 7500 + }, + { + "epoch": 4.842479018721756, + "grad_norm": 2.0890317619307663, + "learning_rate": 4.254982475113322e-05, + "loss": 1.0304, + "step": 7501 + }, + { + "epoch": 4.84312459651388, + "grad_norm": 1.6243418717635176, + "learning_rate": 4.250475516645719e-05, + "loss": 0.9802, + "step": 7502 + }, + { + "epoch": 4.8437701743060035, + "grad_norm": 1.6108511995787858, + "learning_rate": 4.2459705523265035e-05, + "loss": 0.8449, + "step": 7503 + }, + { + "epoch": 4.8444157520981275, + "grad_norm": 1.8970669123454524, + "learning_rate": 4.2414675829914077e-05, + "loss": 0.995, + "step": 7504 + }, + { + "epoch": 4.845061329890251, + "grad_norm": 1.8139724806335835, + "learning_rate": 4.236966609475777e-05, + "loss": 0.9579, + "step": 7505 + }, + { + "epoch": 4.845706907682375, + "grad_norm": 1.7057694486976995, + "learning_rate": 4.232467632614585e-05, + "loss": 1.0616, + "step": 7506 + }, + { + "epoch": 4.846352485474499, + "grad_norm": 1.6812775531723305, + "learning_rate": 4.227970653242441e-05, + "loss": 0.9822, + "step": 7507 + }, + { + "epoch": 4.846998063266623, + "grad_norm": 1.615394007241576, + "learning_rate": 4.22347567219358e-05, + "loss": 0.882, + "step": 7508 + }, + { + "epoch": 4.847643641058747, + "grad_norm": 1.8774871447744115, + "learning_rate": 4.21898269030187e-05, + "loss": 1.0102, + "step": 7509 + }, + { + "epoch": 4.848289218850871, + "grad_norm": 1.7260283224805044, + "learning_rate": 4.2144917084008027e-05, + "loss": 0.9859, + "step": 7510 + }, + { + "epoch": 4.848934796642995, + "grad_norm": 1.718063553658753, + "learning_rate": 4.210002727323503e-05, + "loss": 0.9792, + "step": 7511 + }, + { + "epoch": 4.84958037443512, + "grad_norm": 1.8407603816965676, + "learning_rate": 4.2055157479027245e-05, + "loss": 0.9505, + "step": 7512 + }, + { + "epoch": 4.850225952227244, + "grad_norm": 1.8979211592643026, + "learning_rate": 4.201030770970848e-05, + "loss": 0.974, + "step": 7513 + }, + { + "epoch": 4.850871530019368, + "grad_norm": 1.6483980441910493, + "learning_rate": 4.196547797359877e-05, + "loss": 0.955, + "step": 7514 + }, + { + "epoch": 4.851517107811492, + "grad_norm": 1.7132450393349856, + "learning_rate": 4.192066827901467e-05, + "loss": 0.9118, + "step": 7515 + }, + { + "epoch": 4.8521626856036155, + "grad_norm": 1.8361485580904, + "learning_rate": 4.1875878634268655e-05, + "loss": 1.0039, + "step": 7516 + }, + { + "epoch": 4.8528082633957395, + "grad_norm": 1.9738533356881731, + "learning_rate": 4.183110904766972e-05, + "loss": 0.9716, + "step": 7517 + }, + { + "epoch": 4.853453841187863, + "grad_norm": 1.8173414580407552, + "learning_rate": 4.17863595275232e-05, + "loss": 0.9446, + "step": 7518 + }, + { + "epoch": 4.854099418979987, + "grad_norm": 1.7565809074392829, + 
"learning_rate": 4.174163008213047e-05, + "loss": 0.9369, + "step": 7519 + }, + { + "epoch": 4.854744996772111, + "grad_norm": 1.7981826768485314, + "learning_rate": 4.1696920719789285e-05, + "loss": 1.0548, + "step": 7520 + }, + { + "epoch": 4.855390574564235, + "grad_norm": 2.2347496368669013, + "learning_rate": 4.1652231448793847e-05, + "loss": 1.0779, + "step": 7521 + }, + { + "epoch": 4.856036152356359, + "grad_norm": 1.7396753315863842, + "learning_rate": 4.1607562277434306e-05, + "loss": 0.921, + "step": 7522 + }, + { + "epoch": 4.856681730148483, + "grad_norm": 1.627434983681693, + "learning_rate": 4.156291321399738e-05, + "loss": 0.8997, + "step": 7523 + }, + { + "epoch": 4.857327307940607, + "grad_norm": 1.8376682768661183, + "learning_rate": 4.151828426676595e-05, + "loss": 1.0287, + "step": 7524 + }, + { + "epoch": 4.857972885732731, + "grad_norm": 1.5876733285089535, + "learning_rate": 4.1473675444018995e-05, + "loss": 0.9278, + "step": 7525 + }, + { + "epoch": 4.858618463524855, + "grad_norm": 1.6691120514812068, + "learning_rate": 4.142908675403205e-05, + "loss": 0.9025, + "step": 7526 + }, + { + "epoch": 4.859264041316979, + "grad_norm": 1.756932266661235, + "learning_rate": 4.138451820507679e-05, + "loss": 0.8942, + "step": 7527 + }, + { + "epoch": 4.859909619109103, + "grad_norm": 1.8755492185638845, + "learning_rate": 4.1339969805421e-05, + "loss": 1.0656, + "step": 7528 + }, + { + "epoch": 4.860555196901227, + "grad_norm": 2.106407654584005, + "learning_rate": 4.1295441563329016e-05, + "loss": 1.0822, + "step": 7529 + }, + { + "epoch": 4.8612007746933505, + "grad_norm": 1.7116756511680793, + "learning_rate": 4.125093348706125e-05, + "loss": 0.9968, + "step": 7530 + }, + { + "epoch": 4.8618463524854745, + "grad_norm": 1.9344043934692792, + "learning_rate": 4.1206445584874396e-05, + "loss": 0.9804, + "step": 7531 + }, + { + "epoch": 4.862491930277598, + "grad_norm": 1.7974567002445534, + "learning_rate": 4.116197786502142e-05, + "loss": 1.0075, + "step": 7532 + }, + { + "epoch": 4.863137508069722, + "grad_norm": 1.6788870675175214, + "learning_rate": 4.111753033575156e-05, + "loss": 0.9472, + "step": 7533 + }, + { + "epoch": 4.863783085861846, + "grad_norm": 1.9225623236128344, + "learning_rate": 4.107310300531027e-05, + "loss": 1.1192, + "step": 7534 + }, + { + "epoch": 4.86442866365397, + "grad_norm": 7.416802141895717, + "learning_rate": 4.102869588193932e-05, + "loss": 1.0072, + "step": 7535 + }, + { + "epoch": 4.865074241446094, + "grad_norm": 1.7368196420807105, + "learning_rate": 4.098430897387662e-05, + "loss": 0.9738, + "step": 7536 + }, + { + "epoch": 4.865719819238218, + "grad_norm": 1.833581762211891, + "learning_rate": 4.093994228935654e-05, + "loss": 1.1017, + "step": 7537 + }, + { + "epoch": 4.866365397030342, + "grad_norm": 1.6393967348204397, + "learning_rate": 4.089559583660942e-05, + "loss": 0.912, + "step": 7538 + }, + { + "epoch": 4.867010974822466, + "grad_norm": 1.6396928666058173, + "learning_rate": 4.085126962386202e-05, + "loss": 0.8909, + "step": 7539 + }, + { + "epoch": 4.86765655261459, + "grad_norm": 1.6730527912680495, + "learning_rate": 4.08069636593374e-05, + "loss": 0.9357, + "step": 7540 + }, + { + "epoch": 4.868302130406714, + "grad_norm": 1.749040057065672, + "learning_rate": 4.0762677951254685e-05, + "loss": 1.0141, + "step": 7541 + }, + { + "epoch": 4.868947708198838, + "grad_norm": 1.9681881605768885, + "learning_rate": 4.071841250782929e-05, + "loss": 1.1416, + "step": 7542 + }, + { + "epoch": 4.869593285990962, + "grad_norm": 
1.953653118341507, + "learning_rate": 4.06741673372731e-05, + "loss": 1.037, + "step": 7543 + }, + { + "epoch": 4.870238863783086, + "grad_norm": 1.7326848313133754, + "learning_rate": 4.0629942447793865e-05, + "loss": 1.0068, + "step": 7544 + }, + { + "epoch": 4.8708844415752095, + "grad_norm": 1.9241431145812649, + "learning_rate": 4.058573784759581e-05, + "loss": 0.953, + "step": 7545 + }, + { + "epoch": 4.871530019367333, + "grad_norm": 1.6956621805056877, + "learning_rate": 4.054155354487945e-05, + "loss": 0.8895, + "step": 7546 + }, + { + "epoch": 4.872175597159457, + "grad_norm": 1.8665428810027538, + "learning_rate": 4.0497389547841265e-05, + "loss": 0.9888, + "step": 7547 + }, + { + "epoch": 4.872821174951581, + "grad_norm": 1.6168225929111402, + "learning_rate": 4.045324586467426e-05, + "loss": 0.9799, + "step": 7548 + }, + { + "epoch": 4.873466752743706, + "grad_norm": 1.6953986483713976, + "learning_rate": 4.040912250356756e-05, + "loss": 0.9564, + "step": 7549 + }, + { + "epoch": 4.87411233053583, + "grad_norm": 1.8700957873623432, + "learning_rate": 4.036501947270636e-05, + "loss": 1.0091, + "step": 7550 + }, + { + "epoch": 4.874757908327954, + "grad_norm": 1.681153678960867, + "learning_rate": 4.032093678027237e-05, + "loss": 0.8849, + "step": 7551 + }, + { + "epoch": 4.875403486120078, + "grad_norm": 1.8878638903708425, + "learning_rate": 4.02768744344434e-05, + "loss": 0.9516, + "step": 7552 + }, + { + "epoch": 4.876049063912202, + "grad_norm": 1.5583932788309058, + "learning_rate": 4.023283244339331e-05, + "loss": 0.8544, + "step": 7553 + }, + { + "epoch": 4.876694641704326, + "grad_norm": 1.7677079383443834, + "learning_rate": 4.018881081529251e-05, + "loss": 0.9561, + "step": 7554 + }, + { + "epoch": 4.87734021949645, + "grad_norm": 1.7802401105338401, + "learning_rate": 4.014480955830741e-05, + "loss": 0.9467, + "step": 7555 + }, + { + "epoch": 4.877985797288574, + "grad_norm": 1.7360125106375153, + "learning_rate": 4.010082868060068e-05, + "loss": 0.8568, + "step": 7556 + }, + { + "epoch": 4.8786313750806976, + "grad_norm": 1.4483866267125838, + "learning_rate": 4.005686819033126e-05, + "loss": 0.8917, + "step": 7557 + }, + { + "epoch": 4.8792769528728215, + "grad_norm": 1.6996921821775974, + "learning_rate": 4.001292809565428e-05, + "loss": 0.9354, + "step": 7558 + }, + { + "epoch": 4.879922530664945, + "grad_norm": 1.7069709668242055, + "learning_rate": 3.996900840472107e-05, + "loss": 0.9176, + "step": 7559 + }, + { + "epoch": 4.880568108457069, + "grad_norm": 1.8942580458513552, + "learning_rate": 3.992510912567917e-05, + "loss": 0.9584, + "step": 7560 + }, + { + "epoch": 4.881213686249193, + "grad_norm": 1.864367272756962, + "learning_rate": 3.9881230266672354e-05, + "loss": 0.9767, + "step": 7561 + }, + { + "epoch": 4.881859264041317, + "grad_norm": 1.6560770726744691, + "learning_rate": 3.983737183584069e-05, + "loss": 1.0358, + "step": 7562 + }, + { + "epoch": 4.882504841833441, + "grad_norm": 1.6942442260532886, + "learning_rate": 3.979353384132028e-05, + "loss": 0.8372, + "step": 7563 + }, + { + "epoch": 4.883150419625565, + "grad_norm": 1.858708885216614, + "learning_rate": 3.97497162912435e-05, + "loss": 0.9126, + "step": 7564 + }, + { + "epoch": 4.883795997417689, + "grad_norm": 1.6834724369146385, + "learning_rate": 3.9705919193739096e-05, + "loss": 0.9572, + "step": 7565 + }, + { + "epoch": 4.884441575209813, + "grad_norm": 1.6905694139736516, + "learning_rate": 3.966214255693176e-05, + "loss": 0.9625, + "step": 7566 + }, + { + "epoch": 
4.885087153001937, + "grad_norm": 1.830787845263346, + "learning_rate": 3.961838638894251e-05, + "loss": 1.1106, + "step": 7567 + }, + { + "epoch": 4.885732730794061, + "grad_norm": 1.8069017623743109, + "learning_rate": 3.9574650697888716e-05, + "loss": 0.8476, + "step": 7568 + }, + { + "epoch": 4.886378308586185, + "grad_norm": 1.8043855852119168, + "learning_rate": 3.953093549188365e-05, + "loss": 0.9717, + "step": 7569 + }, + { + "epoch": 4.887023886378309, + "grad_norm": 1.8585980049335922, + "learning_rate": 3.948724077903693e-05, + "loss": 1.028, + "step": 7570 + }, + { + "epoch": 4.887669464170433, + "grad_norm": 1.889531220854511, + "learning_rate": 3.944356656745454e-05, + "loss": 1.0687, + "step": 7571 + }, + { + "epoch": 4.8883150419625565, + "grad_norm": 1.7650697876677237, + "learning_rate": 3.9399912865238296e-05, + "loss": 1.0038, + "step": 7572 + }, + { + "epoch": 4.88896061975468, + "grad_norm": 1.7257417290033121, + "learning_rate": 3.935627968048657e-05, + "loss": 1.0128, + "step": 7573 + }, + { + "epoch": 4.889606197546804, + "grad_norm": 1.8205389529869629, + "learning_rate": 3.931266702129374e-05, + "loss": 0.8729, + "step": 7574 + }, + { + "epoch": 4.890251775338928, + "grad_norm": 1.6946428088338545, + "learning_rate": 3.9269074895750314e-05, + "loss": 0.983, + "step": 7575 + }, + { + "epoch": 4.890897353131052, + "grad_norm": 1.8663706097092716, + "learning_rate": 3.922550331194318e-05, + "loss": 1.0353, + "step": 7576 + }, + { + "epoch": 4.891542930923176, + "grad_norm": 1.5731439852991416, + "learning_rate": 3.918195227795534e-05, + "loss": 0.8391, + "step": 7577 + }, + { + "epoch": 4.8921885087153, + "grad_norm": 2.4177971759757124, + "learning_rate": 3.913842180186584e-05, + "loss": 0.9762, + "step": 7578 + }, + { + "epoch": 4.892834086507424, + "grad_norm": 1.6968280964670064, + "learning_rate": 3.909491189175013e-05, + "loss": 0.9896, + "step": 7579 + }, + { + "epoch": 4.893479664299548, + "grad_norm": 1.7568196089392822, + "learning_rate": 3.9051422555679726e-05, + "loss": 1.0235, + "step": 7580 + }, + { + "epoch": 4.894125242091672, + "grad_norm": 1.7756032532582247, + "learning_rate": 3.9007953801722364e-05, + "loss": 1.0129, + "step": 7581 + }, + { + "epoch": 4.894770819883796, + "grad_norm": 1.9015312357528875, + "learning_rate": 3.8964505637941946e-05, + "loss": 1.1003, + "step": 7582 + }, + { + "epoch": 4.89541639767592, + "grad_norm": 1.6332335473423638, + "learning_rate": 3.8921078072398544e-05, + "loss": 0.8958, + "step": 7583 + }, + { + "epoch": 4.896061975468044, + "grad_norm": 1.9381356390087765, + "learning_rate": 3.8877671113148426e-05, + "loss": 0.874, + "step": 7584 + }, + { + "epoch": 4.896707553260168, + "grad_norm": 1.6769750825526006, + "learning_rate": 3.883428476824403e-05, + "loss": 0.9413, + "step": 7585 + }, + { + "epoch": 4.8973531310522915, + "grad_norm": 1.7091942259501427, + "learning_rate": 3.879091904573394e-05, + "loss": 0.9361, + "step": 7586 + }, + { + "epoch": 4.8979987088444155, + "grad_norm": 1.6893558668572746, + "learning_rate": 3.874757395366309e-05, + "loss": 0.9068, + "step": 7587 + }, + { + "epoch": 4.898644286636539, + "grad_norm": 1.725606123686429, + "learning_rate": 3.8704249500072276e-05, + "loss": 0.9278, + "step": 7588 + }, + { + "epoch": 4.899289864428663, + "grad_norm": 2.139504897884997, + "learning_rate": 3.866094569299866e-05, + "loss": 0.9271, + "step": 7589 + }, + { + "epoch": 4.899935442220787, + "grad_norm": 1.8403914154270118, + "learning_rate": 3.861766254047569e-05, + "loss": 0.968, + "step": 
7590 + }, + { + "epoch": 4.900581020012911, + "grad_norm": 1.6318543783777864, + "learning_rate": 3.8574400050532686e-05, + "loss": 0.9327, + "step": 7591 + }, + { + "epoch": 4.901226597805035, + "grad_norm": 1.7341512809900366, + "learning_rate": 3.8531158231195285e-05, + "loss": 0.9657, + "step": 7592 + }, + { + "epoch": 4.901872175597159, + "grad_norm": 1.8297240224155393, + "learning_rate": 3.848793709048546e-05, + "loss": 1.0457, + "step": 7593 + }, + { + "epoch": 4.902517753389283, + "grad_norm": 1.6062349098715636, + "learning_rate": 3.844473663642102e-05, + "loss": 0.905, + "step": 7594 + }, + { + "epoch": 4.903163331181407, + "grad_norm": 1.6958107463277623, + "learning_rate": 3.8401556877016085e-05, + "loss": 0.917, + "step": 7595 + }, + { + "epoch": 4.903808908973531, + "grad_norm": 1.6804896359595178, + "learning_rate": 3.835839782028112e-05, + "loss": 0.9962, + "step": 7596 + }, + { + "epoch": 4.904454486765655, + "grad_norm": 1.8402346892557964, + "learning_rate": 3.831525947422238e-05, + "loss": 1.1006, + "step": 7597 + }, + { + "epoch": 4.90510006455778, + "grad_norm": 1.9223371514032679, + "learning_rate": 3.827214184684261e-05, + "loss": 1.1674, + "step": 7598 + }, + { + "epoch": 4.9057456423499035, + "grad_norm": 1.8543021514822493, + "learning_rate": 3.822904494614057e-05, + "loss": 0.9247, + "step": 7599 + }, + { + "epoch": 4.906391220142027, + "grad_norm": 1.9611609659662312, + "learning_rate": 3.818596878011107e-05, + "loss": 1.072, + "step": 7600 + }, + { + "epoch": 4.907036797934151, + "grad_norm": 1.7980328525811164, + "learning_rate": 3.814291335674529e-05, + "loss": 1.0908, + "step": 7601 + }, + { + "epoch": 4.907682375726275, + "grad_norm": 2.0465046399999696, + "learning_rate": 3.8099878684030493e-05, + "loss": 0.9837, + "step": 7602 + }, + { + "epoch": 4.908327953518399, + "grad_norm": 1.7490102916946244, + "learning_rate": 3.8056864769949906e-05, + "loss": 0.9381, + "step": 7603 + }, + { + "epoch": 4.908973531310523, + "grad_norm": 1.938164627116501, + "learning_rate": 3.8013871622483184e-05, + "loss": 0.9955, + "step": 7604 + }, + { + "epoch": 4.909619109102647, + "grad_norm": 1.7891044978168673, + "learning_rate": 3.7970899249605956e-05, + "loss": 1.0516, + "step": 7605 + }, + { + "epoch": 4.910264686894771, + "grad_norm": 1.6987887018567172, + "learning_rate": 3.792794765929007e-05, + "loss": 0.9192, + "step": 7606 + }, + { + "epoch": 4.910910264686895, + "grad_norm": 1.641056276580024, + "learning_rate": 3.7885016859503445e-05, + "loss": 0.9239, + "step": 7607 + }, + { + "epoch": 4.911555842479019, + "grad_norm": 1.7148184800339759, + "learning_rate": 3.784210685821025e-05, + "loss": 0.871, + "step": 7608 + }, + { + "epoch": 4.912201420271143, + "grad_norm": 1.4148252030794057, + "learning_rate": 3.779921766337069e-05, + "loss": 0.8606, + "step": 7609 + }, + { + "epoch": 4.912846998063267, + "grad_norm": 2.050566618353991, + "learning_rate": 3.775634928294116e-05, + "loss": 0.9486, + "step": 7610 + }, + { + "epoch": 4.913492575855391, + "grad_norm": 1.6440845214180595, + "learning_rate": 3.7713501724874174e-05, + "loss": 0.8991, + "step": 7611 + }, + { + "epoch": 4.914138153647515, + "grad_norm": 1.8952692368410105, + "learning_rate": 3.767067499711849e-05, + "loss": 1.0119, + "step": 7612 + }, + { + "epoch": 4.9147837314396385, + "grad_norm": 1.7326090260562754, + "learning_rate": 3.762786910761884e-05, + "loss": 1.014, + "step": 7613 + }, + { + "epoch": 4.9154293092317625, + "grad_norm": 1.6648272913468807, + "learning_rate": 3.7585084064316076e-05, + 
"loss": 0.9393, + "step": 7614 + }, + { + "epoch": 4.916074887023886, + "grad_norm": 1.5964286025162195, + "learning_rate": 3.7542319875147475e-05, + "loss": 0.8922, + "step": 7615 + }, + { + "epoch": 4.91672046481601, + "grad_norm": 1.5707459140271276, + "learning_rate": 3.7499576548046075e-05, + "loss": 0.9334, + "step": 7616 + }, + { + "epoch": 4.917366042608134, + "grad_norm": 1.6911029187850728, + "learning_rate": 3.7456854090941196e-05, + "loss": 0.8866, + "step": 7617 + }, + { + "epoch": 4.918011620400258, + "grad_norm": 1.8089789254530824, + "learning_rate": 3.7414152511758456e-05, + "loss": 1.0237, + "step": 7618 + }, + { + "epoch": 4.918657198192382, + "grad_norm": 1.8351559839120761, + "learning_rate": 3.737147181841925e-05, + "loss": 0.8875, + "step": 7619 + }, + { + "epoch": 4.919302775984506, + "grad_norm": 1.9400937461731862, + "learning_rate": 3.732881201884142e-05, + "loss": 1.0409, + "step": 7620 + }, + { + "epoch": 4.91994835377663, + "grad_norm": 1.6492763854287251, + "learning_rate": 3.72861731209388e-05, + "loss": 0.9072, + "step": 7621 + }, + { + "epoch": 4.920593931568754, + "grad_norm": 2.065535133941013, + "learning_rate": 3.7243555132621216e-05, + "loss": 1.0496, + "step": 7622 + }, + { + "epoch": 4.921239509360878, + "grad_norm": 1.816601902634341, + "learning_rate": 3.720095806179491e-05, + "loss": 1.0688, + "step": 7623 + }, + { + "epoch": 4.921885087153002, + "grad_norm": 1.7554269394975448, + "learning_rate": 3.715838191636204e-05, + "loss": 1.0688, + "step": 7624 + }, + { + "epoch": 4.922530664945126, + "grad_norm": 1.974293514459141, + "learning_rate": 3.71158267042208e-05, + "loss": 1.1054, + "step": 7625 + }, + { + "epoch": 4.92317624273725, + "grad_norm": 1.7969988121684268, + "learning_rate": 3.707329243326577e-05, + "loss": 1.0787, + "step": 7626 + }, + { + "epoch": 4.923821820529374, + "grad_norm": 1.9454434359058779, + "learning_rate": 3.703077911138749e-05, + "loss": 0.8815, + "step": 7627 + }, + { + "epoch": 4.9244673983214975, + "grad_norm": 1.5906593641672742, + "learning_rate": 3.698828674647249e-05, + "loss": 0.9205, + "step": 7628 + }, + { + "epoch": 4.925112976113621, + "grad_norm": 1.7796758579266434, + "learning_rate": 3.694581534640369e-05, + "loss": 1.0533, + "step": 7629 + }, + { + "epoch": 4.925758553905745, + "grad_norm": 1.8090252027222364, + "learning_rate": 3.6903364919059904e-05, + "loss": 1.0402, + "step": 7630 + }, + { + "epoch": 4.926404131697869, + "grad_norm": 1.5719067708471546, + "learning_rate": 3.686093547231614e-05, + "loss": 0.8869, + "step": 7631 + }, + { + "epoch": 4.927049709489993, + "grad_norm": 2.1028541904188285, + "learning_rate": 3.6818527014043515e-05, + "loss": 1.1145, + "step": 7632 + }, + { + "epoch": 4.927695287282117, + "grad_norm": 1.9628356514114376, + "learning_rate": 3.6776139552109236e-05, + "loss": 1.2942, + "step": 7633 + }, + { + "epoch": 4.928340865074241, + "grad_norm": 1.636762331521942, + "learning_rate": 3.67337730943766e-05, + "loss": 0.8475, + "step": 7634 + }, + { + "epoch": 4.928986442866366, + "grad_norm": 1.9610806469986217, + "learning_rate": 3.669142764870503e-05, + "loss": 1.1716, + "step": 7635 + }, + { + "epoch": 4.92963202065849, + "grad_norm": 1.6603744886180773, + "learning_rate": 3.6649103222950005e-05, + "loss": 0.8793, + "step": 7636 + }, + { + "epoch": 4.930277598450614, + "grad_norm": 2.111740065009692, + "learning_rate": 3.660679982496328e-05, + "loss": 1.1619, + "step": 7637 + }, + { + "epoch": 4.930923176242738, + "grad_norm": 1.5498125101932756, + "learning_rate": 
3.656451746259245e-05, + "loss": 0.8544, + "step": 7638 + }, + { + "epoch": 4.931568754034862, + "grad_norm": 1.7037844182160216, + "learning_rate": 3.652225614368132e-05, + "loss": 0.9676, + "step": 7639 + }, + { + "epoch": 4.9322143318269855, + "grad_norm": 1.8685042927345117, + "learning_rate": 3.648001587606992e-05, + "loss": 1.0317, + "step": 7640 + }, + { + "epoch": 4.9328599096191095, + "grad_norm": 1.7363661920198632, + "learning_rate": 3.6437796667594155e-05, + "loss": 0.9168, + "step": 7641 + }, + { + "epoch": 4.933505487411233, + "grad_norm": 2.0733808373180547, + "learning_rate": 3.6395598526086115e-05, + "loss": 1.1263, + "step": 7642 + }, + { + "epoch": 4.934151065203357, + "grad_norm": 1.6529635142937176, + "learning_rate": 3.635342145937413e-05, + "loss": 0.9401, + "step": 7643 + }, + { + "epoch": 4.934796642995481, + "grad_norm": 2.1717679133406778, + "learning_rate": 3.6311265475282306e-05, + "loss": 1.0221, + "step": 7644 + }, + { + "epoch": 4.935442220787605, + "grad_norm": 1.8720492473337105, + "learning_rate": 3.6269130581631135e-05, + "loss": 1.1465, + "step": 7645 + }, + { + "epoch": 4.936087798579729, + "grad_norm": 1.5778955475952339, + "learning_rate": 3.62270167862371e-05, + "loss": 1.0144, + "step": 7646 + }, + { + "epoch": 4.936733376371853, + "grad_norm": 1.859268941639749, + "learning_rate": 3.6184924096912596e-05, + "loss": 0.929, + "step": 7647 + }, + { + "epoch": 4.937378954163977, + "grad_norm": 1.9017878558569454, + "learning_rate": 3.61428525214664e-05, + "loss": 1.0484, + "step": 7648 + }, + { + "epoch": 4.938024531956101, + "grad_norm": 1.7426070601408534, + "learning_rate": 3.6100802067703205e-05, + "loss": 0.9695, + "step": 7649 + }, + { + "epoch": 4.938670109748225, + "grad_norm": 1.9134321405831693, + "learning_rate": 3.605877274342369e-05, + "loss": 1.0352, + "step": 7650 + }, + { + "epoch": 4.939315687540349, + "grad_norm": 1.8821014879658406, + "learning_rate": 3.601676455642487e-05, + "loss": 1.0498, + "step": 7651 + }, + { + "epoch": 4.939961265332473, + "grad_norm": 1.9404051887131901, + "learning_rate": 3.5974777514499695e-05, + "loss": 1.0322, + "step": 7652 + }, + { + "epoch": 4.940606843124597, + "grad_norm": 1.7739920735742636, + "learning_rate": 3.593281162543706e-05, + "loss": 0.9558, + "step": 7653 + }, + { + "epoch": 4.941252420916721, + "grad_norm": 1.732189787320943, + "learning_rate": 3.589086689702221e-05, + "loss": 0.9931, + "step": 7654 + }, + { + "epoch": 4.9418979987088445, + "grad_norm": 1.9744550242671608, + "learning_rate": 3.5848943337036266e-05, + "loss": 1.2833, + "step": 7655 + }, + { + "epoch": 4.942543576500968, + "grad_norm": 1.5412373452832389, + "learning_rate": 3.580704095325652e-05, + "loss": 0.817, + "step": 7656 + }, + { + "epoch": 4.943189154293092, + "grad_norm": 2.0641112683431846, + "learning_rate": 3.576515975345625e-05, + "loss": 1.0342, + "step": 7657 + }, + { + "epoch": 4.943834732085216, + "grad_norm": 2.1915529024047147, + "learning_rate": 3.572329974540489e-05, + "loss": 1.0449, + "step": 7658 + }, + { + "epoch": 4.94448030987734, + "grad_norm": 1.757499746295605, + "learning_rate": 3.568146093686791e-05, + "loss": 0.9056, + "step": 7659 + }, + { + "epoch": 4.945125887669464, + "grad_norm": 1.911123287379401, + "learning_rate": 3.5639643335606815e-05, + "loss": 1.0574, + "step": 7660 + }, + { + "epoch": 4.945771465461588, + "grad_norm": 1.656847103926237, + "learning_rate": 3.5597846949379185e-05, + "loss": 0.8528, + "step": 7661 + }, + { + "epoch": 4.946417043253712, + "grad_norm": 
1.8181936328660522, + "learning_rate": 3.555607178593881e-05, + "loss": 1.0286, + "step": 7662 + }, + { + "epoch": 4.947062621045836, + "grad_norm": 1.687279872815953, + "learning_rate": 3.5514317853035266e-05, + "loss": 0.9988, + "step": 7663 + }, + { + "epoch": 4.94770819883796, + "grad_norm": 1.5684466069466805, + "learning_rate": 3.5472585158414374e-05, + "loss": 0.8355, + "step": 7664 + }, + { + "epoch": 4.948353776630084, + "grad_norm": 1.8788935486678504, + "learning_rate": 3.543087370981809e-05, + "loss": 1.1002, + "step": 7665 + }, + { + "epoch": 4.948999354422208, + "grad_norm": 1.7475035055037347, + "learning_rate": 3.5389183514984184e-05, + "loss": 0.9558, + "step": 7666 + }, + { + "epoch": 4.949644932214332, + "grad_norm": 1.6986826527310277, + "learning_rate": 3.534751458164664e-05, + "loss": 1.0144, + "step": 7667 + }, + { + "epoch": 4.950290510006456, + "grad_norm": 1.6332343742651316, + "learning_rate": 3.5305866917535596e-05, + "loss": 0.9195, + "step": 7668 + }, + { + "epoch": 4.9509360877985795, + "grad_norm": 1.6918282435333558, + "learning_rate": 3.5264240530376954e-05, + "loss": 0.9372, + "step": 7669 + }, + { + "epoch": 4.9515816655907035, + "grad_norm": 1.6961649522398354, + "learning_rate": 3.5222635427892984e-05, + "loss": 0.9546, + "step": 7670 + }, + { + "epoch": 4.952227243382827, + "grad_norm": 1.782926392375325, + "learning_rate": 3.5181051617801844e-05, + "loss": 0.8088, + "step": 7671 + }, + { + "epoch": 4.952872821174951, + "grad_norm": 1.9271158010090677, + "learning_rate": 3.5139489107817646e-05, + "loss": 1.0289, + "step": 7672 + }, + { + "epoch": 4.953518398967075, + "grad_norm": 1.7997312148759366, + "learning_rate": 3.509794790565078e-05, + "loss": 1.0222, + "step": 7673 + }, + { + "epoch": 4.954163976759199, + "grad_norm": 1.584861585855831, + "learning_rate": 3.505642801900759e-05, + "loss": 0.9832, + "step": 7674 + }, + { + "epoch": 4.954809554551323, + "grad_norm": 1.8495771745933571, + "learning_rate": 3.501492945559033e-05, + "loss": 0.8668, + "step": 7675 + }, + { + "epoch": 4.955455132343447, + "grad_norm": 1.614200744059741, + "learning_rate": 3.4973452223097496e-05, + "loss": 0.9487, + "step": 7676 + }, + { + "epoch": 4.956100710135571, + "grad_norm": 1.9092573981724499, + "learning_rate": 3.493199632922358e-05, + "loss": 0.9887, + "step": 7677 + }, + { + "epoch": 4.956746287927695, + "grad_norm": 1.7315477463733615, + "learning_rate": 3.4890561781658945e-05, + "loss": 0.9707, + "step": 7678 + }, + { + "epoch": 4.957391865719819, + "grad_norm": 1.6192683088301845, + "learning_rate": 3.484914858809026e-05, + "loss": 0.8565, + "step": 7679 + }, + { + "epoch": 4.958037443511943, + "grad_norm": 1.8754659109869833, + "learning_rate": 3.480775675620006e-05, + "loss": 1.0218, + "step": 7680 + }, + { + "epoch": 4.958683021304067, + "grad_norm": 1.7336327316808349, + "learning_rate": 3.476638629366696e-05, + "loss": 1.0259, + "step": 7681 + }, + { + "epoch": 4.959328599096191, + "grad_norm": 1.9034989040622075, + "learning_rate": 3.47250372081656e-05, + "loss": 1.1119, + "step": 7682 + }, + { + "epoch": 4.9599741768883145, + "grad_norm": 1.6476334682898897, + "learning_rate": 3.468370950736667e-05, + "loss": 0.9393, + "step": 7683 + }, + { + "epoch": 4.960619754680439, + "grad_norm": 2.362724691849978, + "learning_rate": 3.4642403198936896e-05, + "loss": 0.9931, + "step": 7684 + }, + { + "epoch": 4.961265332472563, + "grad_norm": 1.7723145008172145, + "learning_rate": 3.460111829053902e-05, + "loss": 1.0007, + "step": 7685 + }, + { + "epoch": 
4.961910910264687, + "grad_norm": 1.7267441015825238, + "learning_rate": 3.4559854789831784e-05, + "loss": 0.882, + "step": 7686 + }, + { + "epoch": 4.962556488056811, + "grad_norm": 1.7858883434046575, + "learning_rate": 3.4518612704470104e-05, + "loss": 1.0173, + "step": 7687 + }, + { + "epoch": 4.963202065848935, + "grad_norm": 1.7642826868331223, + "learning_rate": 3.447739204210471e-05, + "loss": 0.9309, + "step": 7688 + }, + { + "epoch": 4.963847643641059, + "grad_norm": 1.7482650285063601, + "learning_rate": 3.4436192810382454e-05, + "loss": 0.883, + "step": 7689 + }, + { + "epoch": 4.964493221433183, + "grad_norm": 1.5017533312547766, + "learning_rate": 3.4395015016946344e-05, + "loss": 0.8214, + "step": 7690 + }, + { + "epoch": 4.965138799225307, + "grad_norm": 2.869400711865284, + "learning_rate": 3.4353858669435185e-05, + "loss": 1.026, + "step": 7691 + }, + { + "epoch": 4.965784377017431, + "grad_norm": 1.5999242458633485, + "learning_rate": 3.4312723775483875e-05, + "loss": 0.9378, + "step": 7692 + }, + { + "epoch": 4.966429954809555, + "grad_norm": 1.686444107732542, + "learning_rate": 3.42716103427235e-05, + "loss": 0.9764, + "step": 7693 + }, + { + "epoch": 4.967075532601679, + "grad_norm": 1.6614724013669169, + "learning_rate": 3.4230518378780875e-05, + "loss": 0.913, + "step": 7694 + }, + { + "epoch": 4.967721110393803, + "grad_norm": 1.6340823299954776, + "learning_rate": 3.4189447891279086e-05, + "loss": 0.8165, + "step": 7695 + }, + { + "epoch": 4.9683666881859265, + "grad_norm": 1.867913694666717, + "learning_rate": 3.414839888783716e-05, + "loss": 1.1792, + "step": 7696 + }, + { + "epoch": 4.9690122659780505, + "grad_norm": 1.7385218057230778, + "learning_rate": 3.410737137606998e-05, + "loss": 0.921, + "step": 7697 + }, + { + "epoch": 4.969657843770174, + "grad_norm": 1.6710903785299647, + "learning_rate": 3.40663653635887e-05, + "loss": 0.8997, + "step": 7698 + }, + { + "epoch": 4.970303421562298, + "grad_norm": 1.8445679669587773, + "learning_rate": 3.402538085800035e-05, + "loss": 0.9682, + "step": 7699 + }, + { + "epoch": 4.970948999354422, + "grad_norm": 1.8524313322960222, + "learning_rate": 3.398441786690787e-05, + "loss": 1.0775, + "step": 7700 + }, + { + "epoch": 4.971594577146546, + "grad_norm": 1.7942970299645897, + "learning_rate": 3.394347639791045e-05, + "loss": 0.9816, + "step": 7701 + }, + { + "epoch": 4.97224015493867, + "grad_norm": 1.6326390172671623, + "learning_rate": 3.3902556458603136e-05, + "loss": 0.8601, + "step": 7702 + }, + { + "epoch": 4.972885732730794, + "grad_norm": 1.6806508414974612, + "learning_rate": 3.38616580565769e-05, + "loss": 0.9127, + "step": 7703 + }, + { + "epoch": 4.973531310522918, + "grad_norm": 1.642679374434472, + "learning_rate": 3.382078119941892e-05, + "loss": 0.8769, + "step": 7704 + }, + { + "epoch": 4.974176888315042, + "grad_norm": 1.983197267936567, + "learning_rate": 3.377992589471225e-05, + "loss": 1.0314, + "step": 7705 + }, + { + "epoch": 4.974822466107166, + "grad_norm": 1.7001442644912974, + "learning_rate": 3.373909215003599e-05, + "loss": 1.0011, + "step": 7706 + }, + { + "epoch": 4.97546804389929, + "grad_norm": 2.6362167583461456, + "learning_rate": 3.3698279972965196e-05, + "loss": 1.2788, + "step": 7707 + }, + { + "epoch": 4.976113621691414, + "grad_norm": 1.927566806712737, + "learning_rate": 3.365748937107097e-05, + "loss": 0.9488, + "step": 7708 + }, + { + "epoch": 4.976759199483538, + "grad_norm": 1.6211638440565903, + "learning_rate": 3.3616720351920364e-05, + "loss": 0.9268, + "step": 7709 + 
}, + { + "epoch": 4.9774047772756616, + "grad_norm": 1.7784089537384784, + "learning_rate": 3.35759729230765e-05, + "loss": 0.9101, + "step": 7710 + }, + { + "epoch": 4.9780503550677855, + "grad_norm": 1.8241832949987589, + "learning_rate": 3.3535247092098366e-05, + "loss": 1.0345, + "step": 7711 + }, + { + "epoch": 4.978695932859909, + "grad_norm": 2.7793414438170823, + "learning_rate": 3.349454286654117e-05, + "loss": 0.9887, + "step": 7712 + }, + { + "epoch": 4.979341510652033, + "grad_norm": 1.7497943671497882, + "learning_rate": 3.345386025395583e-05, + "loss": 1.0221, + "step": 7713 + }, + { + "epoch": 4.979987088444157, + "grad_norm": 1.9057211303202362, + "learning_rate": 3.3413199261889405e-05, + "loss": 1.0472, + "step": 7714 + }, + { + "epoch": 4.980632666236281, + "grad_norm": 1.5981021724950173, + "learning_rate": 3.337255989788507e-05, + "loss": 0.8933, + "step": 7715 + }, + { + "epoch": 4.981278244028405, + "grad_norm": 1.9620700613832185, + "learning_rate": 3.33319421694817e-05, + "loss": 1.0689, + "step": 7716 + }, + { + "epoch": 4.981923821820529, + "grad_norm": 1.7426544631738474, + "learning_rate": 3.32913460842143e-05, + "loss": 0.9677, + "step": 7717 + }, + { + "epoch": 4.982569399612653, + "grad_norm": 1.6858355567924186, + "learning_rate": 3.325077164961402e-05, + "loss": 0.9385, + "step": 7718 + }, + { + "epoch": 4.983214977404777, + "grad_norm": 1.8831329338323026, + "learning_rate": 3.321021887320766e-05, + "loss": 1.0676, + "step": 7719 + }, + { + "epoch": 4.983860555196901, + "grad_norm": 1.697757287567717, + "learning_rate": 3.31696877625183e-05, + "loss": 0.9216, + "step": 7720 + }, + { + "epoch": 4.984506132989026, + "grad_norm": 1.7765314253878803, + "learning_rate": 3.3129178325064887e-05, + "loss": 0.9717, + "step": 7721 + }, + { + "epoch": 4.98515171078115, + "grad_norm": 1.786055241048077, + "learning_rate": 3.30886905683622e-05, + "loss": 1.0182, + "step": 7722 + }, + { + "epoch": 4.9857972885732735, + "grad_norm": 1.7545279707431276, + "learning_rate": 3.304822449992128e-05, + "loss": 0.9829, + "step": 7723 + }, + { + "epoch": 4.9864428663653975, + "grad_norm": 1.807674894177251, + "learning_rate": 3.3007780127249005e-05, + "loss": 0.9781, + "step": 7724 + }, + { + "epoch": 4.987088444157521, + "grad_norm": 1.948277782753987, + "learning_rate": 3.2967357457848096e-05, + "loss": 0.8461, + "step": 7725 + }, + { + "epoch": 4.987734021949645, + "grad_norm": 1.6955238842043157, + "learning_rate": 3.292695649921751e-05, + "loss": 0.9347, + "step": 7726 + }, + { + "epoch": 4.988379599741769, + "grad_norm": 1.5496874945555583, + "learning_rate": 3.2886577258851996e-05, + "loss": 0.8471, + "step": 7727 + }, + { + "epoch": 4.989025177533893, + "grad_norm": 1.7612761376704016, + "learning_rate": 3.284621974424232e-05, + "loss": 1.0033, + "step": 7728 + }, + { + "epoch": 4.989670755326017, + "grad_norm": 1.960373087371953, + "learning_rate": 3.280588396287521e-05, + "loss": 1.0885, + "step": 7729 + }, + { + "epoch": 4.990316333118141, + "grad_norm": 1.812173545907187, + "learning_rate": 3.276556992223339e-05, + "loss": 0.97, + "step": 7730 + }, + { + "epoch": 4.990961910910265, + "grad_norm": 1.8553437360243703, + "learning_rate": 3.2725277629795526e-05, + "loss": 1.1638, + "step": 7731 + }, + { + "epoch": 4.991607488702389, + "grad_norm": 1.619728502037623, + "learning_rate": 3.268500709303627e-05, + "loss": 0.9237, + "step": 7732 + }, + { + "epoch": 4.992253066494513, + "grad_norm": 1.844934746470277, + "learning_rate": 3.264475831942621e-05, + "loss": 1.001, + 
"step": 7733 + }, + { + "epoch": 4.992898644286637, + "grad_norm": 1.499880639104303, + "learning_rate": 3.260453131643191e-05, + "loss": 0.7955, + "step": 7734 + }, + { + "epoch": 4.993544222078761, + "grad_norm": 1.8050134147490653, + "learning_rate": 3.256432609151589e-05, + "loss": 1.1119, + "step": 7735 + }, + { + "epoch": 4.994189799870885, + "grad_norm": 1.6723834420700374, + "learning_rate": 3.2524142652136627e-05, + "loss": 0.8073, + "step": 7736 + }, + { + "epoch": 4.994835377663009, + "grad_norm": 1.9216806168198415, + "learning_rate": 3.2483981005748654e-05, + "loss": 1.1204, + "step": 7737 + }, + { + "epoch": 4.9954809554551325, + "grad_norm": 1.674069507379496, + "learning_rate": 3.244384115980227e-05, + "loss": 0.9345, + "step": 7738 + }, + { + "epoch": 4.996126533247256, + "grad_norm": 1.803198488002074, + "learning_rate": 3.240372312174383e-05, + "loss": 0.9785, + "step": 7739 + }, + { + "epoch": 4.99677211103938, + "grad_norm": 1.799503288495518, + "learning_rate": 3.2363626899015767e-05, + "loss": 0.9241, + "step": 7740 + }, + { + "epoch": 4.997417688831504, + "grad_norm": 2.1043092028538815, + "learning_rate": 3.2323552499056226e-05, + "loss": 1.0192, + "step": 7741 + }, + { + "epoch": 4.998063266623628, + "grad_norm": 1.5976528348957135, + "learning_rate": 3.228349992929942e-05, + "loss": 0.8715, + "step": 7742 + }, + { + "epoch": 4.998708844415752, + "grad_norm": 1.8567003549941399, + "learning_rate": 3.224346919717564e-05, + "loss": 0.9485, + "step": 7743 + }, + { + "epoch": 4.999354422207876, + "grad_norm": 1.731178636564252, + "learning_rate": 3.2203460310110846e-05, + "loss": 0.8865, + "step": 7744 + }, + { + "epoch": 5.0, + "grad_norm": 2.1871145892583854, + "learning_rate": 3.2163473275527225e-05, + "loss": 1.1932, + "step": 7745 + }, + { + "epoch": 5.0, + "eval_loss": 2.3809738159179688, + "eval_runtime": 58.4305, + "eval_samples_per_second": 5.939, + "eval_steps_per_second": 5.939, + "step": 7745 + }, + { + "epoch": 5.000645577792124, + "grad_norm": 1.101573209824591, + "learning_rate": 3.2123508100842776e-05, + "loss": 0.5323, + "step": 7746 + }, + { + "epoch": 5.001291155584248, + "grad_norm": 2.0138601619873895, + "learning_rate": 3.208356479347132e-05, + "loss": 0.6039, + "step": 7747 + }, + { + "epoch": 5.001936733376372, + "grad_norm": 1.1136999416333802, + "learning_rate": 3.204364336082292e-05, + "loss": 0.5732, + "step": 7748 + }, + { + "epoch": 5.002582311168496, + "grad_norm": 1.1847835950763541, + "learning_rate": 3.2003743810303374e-05, + "loss": 0.5323, + "step": 7749 + }, + { + "epoch": 5.00322788896062, + "grad_norm": 1.1366461588095416, + "learning_rate": 3.1963866149314366e-05, + "loss": 0.5267, + "step": 7750 + }, + { + "epoch": 5.003873466752744, + "grad_norm": 1.2592494119008943, + "learning_rate": 3.192401038525373e-05, + "loss": 0.5511, + "step": 7751 + }, + { + "epoch": 5.0045190445448675, + "grad_norm": 1.1278068834918236, + "learning_rate": 3.188417652551508e-05, + "loss": 0.5601, + "step": 7752 + }, + { + "epoch": 5.005164622336991, + "grad_norm": 1.086377861107149, + "learning_rate": 3.184436457748801e-05, + "loss": 0.4807, + "step": 7753 + }, + { + "epoch": 5.005810200129115, + "grad_norm": 1.2581865134092338, + "learning_rate": 3.1804574548558065e-05, + "loss": 0.6222, + "step": 7754 + }, + { + "epoch": 5.006455777921239, + "grad_norm": 1.166298286994848, + "learning_rate": 3.176480644610668e-05, + "loss": 0.6196, + "step": 7755 + }, + { + "epoch": 5.007101355713363, + "grad_norm": 1.183341826987299, + "learning_rate": 
3.172506027751128e-05, + "loss": 0.5061, + "step": 7756 + }, + { + "epoch": 5.007746933505487, + "grad_norm": 1.137538205583675, + "learning_rate": 3.168533605014518e-05, + "loss": 0.5645, + "step": 7757 + }, + { + "epoch": 5.008392511297611, + "grad_norm": 1.1227399937335698, + "learning_rate": 3.1645633771377636e-05, + "loss": 0.5044, + "step": 7758 + }, + { + "epoch": 5.009038089089735, + "grad_norm": 1.139542043299015, + "learning_rate": 3.160595344857384e-05, + "loss": 0.4779, + "step": 7759 + }, + { + "epoch": 5.009683666881859, + "grad_norm": 1.217975914267146, + "learning_rate": 3.1566295089094885e-05, + "loss": 0.5517, + "step": 7760 + }, + { + "epoch": 5.010329244673983, + "grad_norm": 1.6010249907550045, + "learning_rate": 3.152665870029779e-05, + "loss": 0.5984, + "step": 7761 + }, + { + "epoch": 5.010974822466107, + "grad_norm": 1.189411166814406, + "learning_rate": 3.148704428953561e-05, + "loss": 0.5199, + "step": 7762 + }, + { + "epoch": 5.011620400258231, + "grad_norm": 1.2419534407546966, + "learning_rate": 3.1447451864157134e-05, + "loss": 0.6018, + "step": 7763 + }, + { + "epoch": 5.012265978050355, + "grad_norm": 1.2638251191100878, + "learning_rate": 3.140788143150716e-05, + "loss": 0.4794, + "step": 7764 + }, + { + "epoch": 5.012911555842479, + "grad_norm": 1.2628614720333529, + "learning_rate": 3.1368332998926544e-05, + "loss": 0.5752, + "step": 7765 + }, + { + "epoch": 5.0135571336346025, + "grad_norm": 1.1624858203630797, + "learning_rate": 3.13288065737518e-05, + "loss": 0.5046, + "step": 7766 + }, + { + "epoch": 5.014202711426727, + "grad_norm": 1.2040148551457437, + "learning_rate": 3.128930216331549e-05, + "loss": 0.5362, + "step": 7767 + }, + { + "epoch": 5.014848289218851, + "grad_norm": 1.5460336590535717, + "learning_rate": 3.124981977494621e-05, + "loss": 0.5857, + "step": 7768 + }, + { + "epoch": 5.015493867010975, + "grad_norm": 1.3534359994196894, + "learning_rate": 3.1210359415968193e-05, + "loss": 0.5817, + "step": 7769 + }, + { + "epoch": 5.016139444803099, + "grad_norm": 1.3226824409172444, + "learning_rate": 3.1170921093701875e-05, + "loss": 0.5216, + "step": 7770 + }, + { + "epoch": 5.016785022595223, + "grad_norm": 1.2791220202060565, + "learning_rate": 3.113150481546347e-05, + "loss": 0.5553, + "step": 7771 + }, + { + "epoch": 5.017430600387347, + "grad_norm": 1.4413728307125344, + "learning_rate": 3.109211058856499e-05, + "loss": 0.5742, + "step": 7772 + }, + { + "epoch": 5.018076178179471, + "grad_norm": 1.288109715689227, + "learning_rate": 3.105273842031458e-05, + "loss": 0.5332, + "step": 7773 + }, + { + "epoch": 5.018721755971595, + "grad_norm": 1.3444771668201736, + "learning_rate": 3.101338831801619e-05, + "loss": 0.5438, + "step": 7774 + }, + { + "epoch": 5.019367333763719, + "grad_norm": 1.3031904664407346, + "learning_rate": 3.0974060288969575e-05, + "loss": 0.5705, + "step": 7775 + }, + { + "epoch": 5.020012911555843, + "grad_norm": 1.4742633816221635, + "learning_rate": 3.093475434047059e-05, + "loss": 0.5506, + "step": 7776 + }, + { + "epoch": 5.020658489347967, + "grad_norm": 1.3890328183889034, + "learning_rate": 3.089547047981085e-05, + "loss": 0.579, + "step": 7777 + }, + { + "epoch": 5.021304067140091, + "grad_norm": 1.5860684616827738, + "learning_rate": 3.085620871427793e-05, + "loss": 0.5023, + "step": 7778 + }, + { + "epoch": 5.0219496449322145, + "grad_norm": 1.4026934467254872, + "learning_rate": 3.081696905115531e-05, + "loss": 0.5578, + "step": 7779 + }, + { + "epoch": 5.0225952227243384, + "grad_norm": 
1.4231905292666331, + "learning_rate": 3.077775149772232e-05, + "loss": 0.439, + "step": 7780 + }, + { + "epoch": 5.023240800516462, + "grad_norm": 1.443004960206405, + "learning_rate": 3.0738556061254255e-05, + "loss": 0.5802, + "step": 7781 + }, + { + "epoch": 5.023886378308586, + "grad_norm": 1.4212571754932624, + "learning_rate": 3.069938274902227e-05, + "loss": 0.5556, + "step": 7782 + }, + { + "epoch": 5.02453195610071, + "grad_norm": 1.3613750949022247, + "learning_rate": 3.066023156829341e-05, + "loss": 0.5026, + "step": 7783 + }, + { + "epoch": 5.025177533892834, + "grad_norm": 1.3684437129466596, + "learning_rate": 3.0621102526330637e-05, + "loss": 0.4836, + "step": 7784 + }, + { + "epoch": 5.025823111684958, + "grad_norm": 1.329639526167068, + "learning_rate": 3.058199563039278e-05, + "loss": 0.5274, + "step": 7785 + }, + { + "epoch": 5.026468689477082, + "grad_norm": 1.214174880611069, + "learning_rate": 3.0542910887734536e-05, + "loss": 0.5158, + "step": 7786 + }, + { + "epoch": 5.027114267269206, + "grad_norm": 1.785803604604789, + "learning_rate": 3.0503848305606664e-05, + "loss": 0.5316, + "step": 7787 + }, + { + "epoch": 5.02775984506133, + "grad_norm": 1.3161233688914729, + "learning_rate": 3.046480789125556e-05, + "loss": 0.5348, + "step": 7788 + }, + { + "epoch": 5.028405422853454, + "grad_norm": 1.3847109565335025, + "learning_rate": 3.042578965192361e-05, + "loss": 0.5235, + "step": 7789 + }, + { + "epoch": 5.029051000645578, + "grad_norm": 1.3370308867370917, + "learning_rate": 3.038679359484924e-05, + "loss": 0.5805, + "step": 7790 + }, + { + "epoch": 5.029696578437702, + "grad_norm": 1.5105325860604362, + "learning_rate": 3.0347819727266483e-05, + "loss": 0.6863, + "step": 7791 + }, + { + "epoch": 5.030342156229826, + "grad_norm": 1.5140880174377203, + "learning_rate": 3.03088680564054e-05, + "loss": 0.6315, + "step": 7792 + }, + { + "epoch": 5.0309877340219495, + "grad_norm": 1.4138906776213351, + "learning_rate": 3.0269938589492065e-05, + "loss": 0.5396, + "step": 7793 + }, + { + "epoch": 5.0316333118140735, + "grad_norm": 1.2804891023829175, + "learning_rate": 3.0231031333748136e-05, + "loss": 0.5166, + "step": 7794 + }, + { + "epoch": 5.032278889606197, + "grad_norm": 1.2855801144346424, + "learning_rate": 3.019214629639142e-05, + "loss": 0.4506, + "step": 7795 + }, + { + "epoch": 5.032924467398321, + "grad_norm": 1.2516754989138088, + "learning_rate": 3.015328348463551e-05, + "loss": 0.5323, + "step": 7796 + }, + { + "epoch": 5.033570045190445, + "grad_norm": 1.3892509182022064, + "learning_rate": 3.0114442905689727e-05, + "loss": 0.6693, + "step": 7797 + }, + { + "epoch": 5.034215622982569, + "grad_norm": 1.3075973130434675, + "learning_rate": 3.0075624566759526e-05, + "loss": 0.4808, + "step": 7798 + }, + { + "epoch": 5.034861200774693, + "grad_norm": 1.3446229579602564, + "learning_rate": 3.0036828475046126e-05, + "loss": 0.5954, + "step": 7799 + }, + { + "epoch": 5.035506778566817, + "grad_norm": 1.3404653986834656, + "learning_rate": 2.9998054637746455e-05, + "loss": 0.5633, + "step": 7800 + }, + { + "epoch": 5.036152356358941, + "grad_norm": 1.357591631311278, + "learning_rate": 2.9959303062053625e-05, + "loss": 0.6105, + "step": 7801 + }, + { + "epoch": 5.036797934151065, + "grad_norm": 1.4680059513267167, + "learning_rate": 2.992057375515637e-05, + "loss": 0.4826, + "step": 7802 + }, + { + "epoch": 5.037443511943189, + "grad_norm": 1.3908251239296379, + "learning_rate": 2.9881866724239407e-05, + "loss": 0.6572, + "step": 7803 + }, + { + "epoch": 
5.038089089735313, + "grad_norm": 1.445915546037406, + "learning_rate": 2.9843181976483275e-05, + "loss": 0.5557, + "step": 7804 + }, + { + "epoch": 5.038734667527437, + "grad_norm": 1.3883847761920913, + "learning_rate": 2.9804519519064424e-05, + "loss": 0.5204, + "step": 7805 + }, + { + "epoch": 5.039380245319561, + "grad_norm": 1.2796542179153982, + "learning_rate": 2.976587935915511e-05, + "loss": 0.4974, + "step": 7806 + }, + { + "epoch": 5.040025823111685, + "grad_norm": 1.2467172404301263, + "learning_rate": 2.97272615039235e-05, + "loss": 0.4647, + "step": 7807 + }, + { + "epoch": 5.0406714009038085, + "grad_norm": 1.356794927436427, + "learning_rate": 2.9688665960533614e-05, + "loss": 0.5536, + "step": 7808 + }, + { + "epoch": 5.041316978695932, + "grad_norm": 1.265747911191676, + "learning_rate": 2.965009273614531e-05, + "loss": 0.4966, + "step": 7809 + }, + { + "epoch": 5.041962556488057, + "grad_norm": 1.3534188577565742, + "learning_rate": 2.9611541837914327e-05, + "loss": 0.504, + "step": 7810 + }, + { + "epoch": 5.042608134280181, + "grad_norm": 1.3207556750377052, + "learning_rate": 2.9573013272992212e-05, + "loss": 0.4955, + "step": 7811 + }, + { + "epoch": 5.043253712072305, + "grad_norm": 1.233466484997427, + "learning_rate": 2.9534507048526534e-05, + "loss": 0.4574, + "step": 7812 + }, + { + "epoch": 5.043899289864429, + "grad_norm": 1.4814905144203943, + "learning_rate": 2.9496023171660493e-05, + "loss": 0.5386, + "step": 7813 + }, + { + "epoch": 5.044544867656553, + "grad_norm": 1.3339147399884304, + "learning_rate": 2.9457561649533247e-05, + "loss": 0.5195, + "step": 7814 + }, + { + "epoch": 5.045190445448677, + "grad_norm": 1.2857892404220972, + "learning_rate": 2.94191224892799e-05, + "loss": 0.4846, + "step": 7815 + }, + { + "epoch": 5.045836023240801, + "grad_norm": 1.1390000626170202, + "learning_rate": 2.938070569803122e-05, + "loss": 0.4659, + "step": 7816 + }, + { + "epoch": 5.046481601032925, + "grad_norm": 1.2514180651630347, + "learning_rate": 2.9342311282913923e-05, + "loss": 0.5492, + "step": 7817 + }, + { + "epoch": 5.047127178825049, + "grad_norm": 1.354798544700313, + "learning_rate": 2.9303939251050675e-05, + "loss": 0.58, + "step": 7818 + }, + { + "epoch": 5.047772756617173, + "grad_norm": 1.297599962058562, + "learning_rate": 2.9265589609559732e-05, + "loss": 0.5494, + "step": 7819 + }, + { + "epoch": 5.0484183344092965, + "grad_norm": 1.6382849557234258, + "learning_rate": 2.9227262365555486e-05, + "loss": 0.5919, + "step": 7820 + }, + { + "epoch": 5.0490639122014205, + "grad_norm": 1.3801369803122567, + "learning_rate": 2.9188957526148004e-05, + "loss": 0.5808, + "step": 7821 + }, + { + "epoch": 5.049709489993544, + "grad_norm": 1.3659757165517734, + "learning_rate": 2.915067509844315e-05, + "loss": 0.467, + "step": 7822 + }, + { + "epoch": 5.050355067785668, + "grad_norm": 1.5289024512090734, + "learning_rate": 2.9112415089542808e-05, + "loss": 0.5767, + "step": 7823 + }, + { + "epoch": 5.051000645577792, + "grad_norm": 1.4005292202712798, + "learning_rate": 2.9074177506544616e-05, + "loss": 0.581, + "step": 7824 + }, + { + "epoch": 5.051646223369916, + "grad_norm": 1.2013218412919415, + "learning_rate": 2.903596235654192e-05, + "loss": 0.4761, + "step": 7825 + }, + { + "epoch": 5.05229180116204, + "grad_norm": 1.2890951545981164, + "learning_rate": 2.8997769646624146e-05, + "loss": 0.5276, + "step": 7826 + }, + { + "epoch": 5.052937378954164, + "grad_norm": 1.293338691070467, + "learning_rate": 2.8959599383876413e-05, + "loss": 0.5147, + 
"step": 7827 + }, + { + "epoch": 5.053582956746288, + "grad_norm": 1.2436867187963123, + "learning_rate": 2.8921451575379673e-05, + "loss": 0.5256, + "step": 7828 + }, + { + "epoch": 5.054228534538412, + "grad_norm": 1.3578341474973814, + "learning_rate": 2.8883326228210747e-05, + "loss": 0.497, + "step": 7829 + }, + { + "epoch": 5.054874112330536, + "grad_norm": 1.4537201903294201, + "learning_rate": 2.884522334944231e-05, + "loss": 0.5694, + "step": 7830 + }, + { + "epoch": 5.05551969012266, + "grad_norm": 1.2353629079770625, + "learning_rate": 2.8807142946142808e-05, + "loss": 0.4907, + "step": 7831 + }, + { + "epoch": 5.056165267914784, + "grad_norm": 1.2936230364324153, + "learning_rate": 2.8769085025376576e-05, + "loss": 0.5237, + "step": 7832 + }, + { + "epoch": 5.056810845706908, + "grad_norm": 1.3579751526222765, + "learning_rate": 2.8731049594203696e-05, + "loss": 0.5505, + "step": 7833 + }, + { + "epoch": 5.057456423499032, + "grad_norm": 1.4418683349618493, + "learning_rate": 2.8693036659680264e-05, + "loss": 0.5186, + "step": 7834 + }, + { + "epoch": 5.0581020012911555, + "grad_norm": 1.3964861427676953, + "learning_rate": 2.8655046228857964e-05, + "loss": 0.543, + "step": 7835 + }, + { + "epoch": 5.058747579083279, + "grad_norm": 1.2227256299174256, + "learning_rate": 2.8617078308784398e-05, + "loss": 0.4806, + "step": 7836 + }, + { + "epoch": 5.059393156875403, + "grad_norm": 1.4006147061256304, + "learning_rate": 2.8579132906503127e-05, + "loss": 0.5474, + "step": 7837 + }, + { + "epoch": 5.060038734667527, + "grad_norm": 1.2929317903115627, + "learning_rate": 2.8541210029053306e-05, + "loss": 0.5894, + "step": 7838 + }, + { + "epoch": 5.060684312459651, + "grad_norm": 2.168106730377126, + "learning_rate": 2.8503309683470028e-05, + "loss": 0.508, + "step": 7839 + }, + { + "epoch": 5.061329890251775, + "grad_norm": 1.8338477871900283, + "learning_rate": 2.846543187678431e-05, + "loss": 0.5536, + "step": 7840 + }, + { + "epoch": 5.061975468043899, + "grad_norm": 1.4231438600555661, + "learning_rate": 2.8427576616022756e-05, + "loss": 0.658, + "step": 7841 + }, + { + "epoch": 5.062621045836023, + "grad_norm": 1.3441220124608233, + "learning_rate": 2.8389743908207928e-05, + "loss": 0.499, + "step": 7842 + }, + { + "epoch": 5.063266623628147, + "grad_norm": 1.4246475913736156, + "learning_rate": 2.8351933760358298e-05, + "loss": 0.5487, + "step": 7843 + }, + { + "epoch": 5.063912201420271, + "grad_norm": 1.1919334179572798, + "learning_rate": 2.8314146179487867e-05, + "loss": 0.441, + "step": 7844 + }, + { + "epoch": 5.064557779212395, + "grad_norm": 1.4777179767531237, + "learning_rate": 2.8276381172606765e-05, + "loss": 0.5841, + "step": 7845 + }, + { + "epoch": 5.065203357004519, + "grad_norm": 1.476750226792022, + "learning_rate": 2.823863874672077e-05, + "loss": 0.5512, + "step": 7846 + }, + { + "epoch": 5.065848934796643, + "grad_norm": 1.2417046362888795, + "learning_rate": 2.82009189088314e-05, + "loss": 0.47, + "step": 7847 + }, + { + "epoch": 5.066494512588767, + "grad_norm": 1.4221797560259688, + "learning_rate": 2.816322166593617e-05, + "loss": 0.539, + "step": 7848 + }, + { + "epoch": 5.0671400903808905, + "grad_norm": 1.28097771121503, + "learning_rate": 2.812554702502834e-05, + "loss": 0.5284, + "step": 7849 + }, + { + "epoch": 5.0677856681730145, + "grad_norm": 1.39408839474073, + "learning_rate": 2.80878949930968e-05, + "loss": 0.6226, + "step": 7850 + }, + { + "epoch": 5.068431245965138, + "grad_norm": 1.1426188792757044, + "learning_rate": 
2.8050265577126514e-05, + "loss": 0.4147, + "step": 7851 + }, + { + "epoch": 5.069076823757262, + "grad_norm": 1.3616098683510973, + "learning_rate": 2.8012658784098113e-05, + "loss": 0.5354, + "step": 7852 + }, + { + "epoch": 5.069722401549387, + "grad_norm": 1.4048584539134643, + "learning_rate": 2.797507462098802e-05, + "loss": 0.4982, + "step": 7853 + }, + { + "epoch": 5.070367979341511, + "grad_norm": 1.3081358488857804, + "learning_rate": 2.7937513094768503e-05, + "loss": 0.5318, + "step": 7854 + }, + { + "epoch": 5.071013557133635, + "grad_norm": 1.5486000049285287, + "learning_rate": 2.7899974212407612e-05, + "loss": 0.5504, + "step": 7855 + }, + { + "epoch": 5.071659134925759, + "grad_norm": 1.4237481744992095, + "learning_rate": 2.7862457980869203e-05, + "loss": 0.5029, + "step": 7856 + }, + { + "epoch": 5.072304712717883, + "grad_norm": 1.298649287050156, + "learning_rate": 2.782496440711291e-05, + "loss": 0.5426, + "step": 7857 + }, + { + "epoch": 5.072950290510007, + "grad_norm": 1.3971454949606137, + "learning_rate": 2.7787493498094173e-05, + "loss": 0.5674, + "step": 7858 + }, + { + "epoch": 5.073595868302131, + "grad_norm": 1.3680827168070637, + "learning_rate": 2.7750045260764324e-05, + "loss": 0.4855, + "step": 7859 + }, + { + "epoch": 5.074241446094255, + "grad_norm": 1.3397742908029426, + "learning_rate": 2.77126197020703e-05, + "loss": 0.5591, + "step": 7860 + }, + { + "epoch": 5.074887023886379, + "grad_norm": 1.534241571413326, + "learning_rate": 2.7675216828954905e-05, + "loss": 0.6432, + "step": 7861 + }, + { + "epoch": 5.0755326016785025, + "grad_norm": 1.4025170705208914, + "learning_rate": 2.763783664835691e-05, + "loss": 0.5644, + "step": 7862 + }, + { + "epoch": 5.076178179470626, + "grad_norm": 1.392171759598, + "learning_rate": 2.7600479167210582e-05, + "loss": 0.5056, + "step": 7863 + }, + { + "epoch": 5.07682375726275, + "grad_norm": 1.3604277987445297, + "learning_rate": 2.7563144392446147e-05, + "loss": 0.5967, + "step": 7864 + }, + { + "epoch": 5.077469335054874, + "grad_norm": 1.2175612962827094, + "learning_rate": 2.752583233098968e-05, + "loss": 0.4838, + "step": 7865 + }, + { + "epoch": 5.078114912846998, + "grad_norm": 1.4047872976887101, + "learning_rate": 2.7488542989762875e-05, + "loss": 0.5958, + "step": 7866 + }, + { + "epoch": 5.078760490639122, + "grad_norm": 1.1871496777138661, + "learning_rate": 2.7451276375683266e-05, + "loss": 0.4481, + "step": 7867 + }, + { + "epoch": 5.079406068431246, + "grad_norm": 1.3134416710320775, + "learning_rate": 2.741403249566431e-05, + "loss": 0.4987, + "step": 7868 + }, + { + "epoch": 5.08005164622337, + "grad_norm": 1.2613297058503299, + "learning_rate": 2.7376811356614998e-05, + "loss": 0.5113, + "step": 7869 + }, + { + "epoch": 5.080697224015494, + "grad_norm": 1.3695535011625524, + "learning_rate": 2.7339612965440348e-05, + "loss": 0.5395, + "step": 7870 + }, + { + "epoch": 5.081342801807618, + "grad_norm": 1.2446554748304837, + "learning_rate": 2.7302437329041034e-05, + "loss": 0.4751, + "step": 7871 + }, + { + "epoch": 5.081988379599742, + "grad_norm": 1.3770571000551108, + "learning_rate": 2.726528445431343e-05, + "loss": 0.4879, + "step": 7872 + }, + { + "epoch": 5.082633957391866, + "grad_norm": 1.3070644785622383, + "learning_rate": 2.7228154348149876e-05, + "loss": 0.4942, + "step": 7873 + }, + { + "epoch": 5.08327953518399, + "grad_norm": 1.3373499417470323, + "learning_rate": 2.71910470174384e-05, + "loss": 0.5666, + "step": 7874 + }, + { + "epoch": 5.083925112976114, + "grad_norm": 
1.3868318679151173, + "learning_rate": 2.7153962469062672e-05, + "loss": 0.4964, + "step": 7875 + }, + { + "epoch": 5.0845706907682375, + "grad_norm": 1.441741788282523, + "learning_rate": 2.7116900709902394e-05, + "loss": 0.5413, + "step": 7876 + }, + { + "epoch": 5.0852162685603615, + "grad_norm": 1.4808944135794408, + "learning_rate": 2.7079861746832864e-05, + "loss": 0.5577, + "step": 7877 + }, + { + "epoch": 5.085861846352485, + "grad_norm": 1.3083302991476102, + "learning_rate": 2.704284558672517e-05, + "loss": 0.528, + "step": 7878 + }, + { + "epoch": 5.086507424144609, + "grad_norm": 1.1874116079709545, + "learning_rate": 2.7005852236446236e-05, + "loss": 0.4381, + "step": 7879 + }, + { + "epoch": 5.087153001936733, + "grad_norm": 1.1462301897291824, + "learning_rate": 2.696888170285867e-05, + "loss": 0.4344, + "step": 7880 + }, + { + "epoch": 5.087798579728857, + "grad_norm": 1.2438472611244127, + "learning_rate": 2.693193399282092e-05, + "loss": 0.5068, + "step": 7881 + }, + { + "epoch": 5.088444157520981, + "grad_norm": 1.1580466903718265, + "learning_rate": 2.689500911318717e-05, + "loss": 0.4449, + "step": 7882 + }, + { + "epoch": 5.089089735313105, + "grad_norm": 1.420938110578931, + "learning_rate": 2.685810707080731e-05, + "loss": 0.5727, + "step": 7883 + }, + { + "epoch": 5.089735313105229, + "grad_norm": 1.393544864000689, + "learning_rate": 2.682122787252718e-05, + "loss": 0.5583, + "step": 7884 + }, + { + "epoch": 5.090380890897353, + "grad_norm": 1.7230846181450867, + "learning_rate": 2.6784371525188137e-05, + "loss": 0.6482, + "step": 7885 + }, + { + "epoch": 5.091026468689477, + "grad_norm": 1.499143610104694, + "learning_rate": 2.6747538035627397e-05, + "loss": 0.5156, + "step": 7886 + }, + { + "epoch": 5.091672046481601, + "grad_norm": 1.3151398921639912, + "learning_rate": 2.6710727410678113e-05, + "loss": 0.5062, + "step": 7887 + }, + { + "epoch": 5.092317624273725, + "grad_norm": 1.2629514217841067, + "learning_rate": 2.6673939657168885e-05, + "loss": 0.5105, + "step": 7888 + }, + { + "epoch": 5.092963202065849, + "grad_norm": 1.234630369121181, + "learning_rate": 2.663717478192424e-05, + "loss": 0.4621, + "step": 7889 + }, + { + "epoch": 5.093608779857973, + "grad_norm": 1.4475285231990616, + "learning_rate": 2.660043279176456e-05, + "loss": 0.5063, + "step": 7890 + }, + { + "epoch": 5.0942543576500965, + "grad_norm": 1.3698932662693226, + "learning_rate": 2.6563713693505735e-05, + "loss": 0.5031, + "step": 7891 + }, + { + "epoch": 5.09489993544222, + "grad_norm": 1.5673815976373098, + "learning_rate": 2.652701749395955e-05, + "loss": 0.7062, + "step": 7892 + }, + { + "epoch": 5.095545513234344, + "grad_norm": 1.2519706114978213, + "learning_rate": 2.649034419993365e-05, + "loss": 0.5196, + "step": 7893 + }, + { + "epoch": 5.096191091026468, + "grad_norm": 1.2983268637684644, + "learning_rate": 2.645369381823117e-05, + "loss": 0.4391, + "step": 7894 + }, + { + "epoch": 5.096836668818592, + "grad_norm": 1.4330705364792071, + "learning_rate": 2.6417066355651206e-05, + "loss": 0.6248, + "step": 7895 + }, + { + "epoch": 5.097482246610717, + "grad_norm": 1.8158234881880198, + "learning_rate": 2.638046181898859e-05, + "loss": 0.5072, + "step": 7896 + }, + { + "epoch": 5.098127824402841, + "grad_norm": 1.3582409157094895, + "learning_rate": 2.6343880215033692e-05, + "loss": 0.4821, + "step": 7897 + }, + { + "epoch": 5.098773402194965, + "grad_norm": 1.5054258019145892, + "learning_rate": 2.6307321550572895e-05, + "loss": 0.5964, + "step": 7898 + }, + { + "epoch": 
5.099418979987089, + "grad_norm": 1.2841358038526343, + "learning_rate": 2.627078583238823e-05, + "loss": 0.494, + "step": 7899 + }, + { + "epoch": 5.100064557779213, + "grad_norm": 1.3464138099777008, + "learning_rate": 2.6234273067257323e-05, + "loss": 0.5301, + "step": 7900 + }, + { + "epoch": 5.100710135571337, + "grad_norm": 1.4674744261105739, + "learning_rate": 2.6197783261953787e-05, + "loss": 0.537, + "step": 7901 + }, + { + "epoch": 5.101355713363461, + "grad_norm": 1.3970578283677195, + "learning_rate": 2.616131642324681e-05, + "loss": 0.4765, + "step": 7902 + }, + { + "epoch": 5.1020012911555845, + "grad_norm": 1.3576259644373538, + "learning_rate": 2.6124872557901393e-05, + "loss": 0.4972, + "step": 7903 + }, + { + "epoch": 5.1026468689477085, + "grad_norm": 1.4095402719718526, + "learning_rate": 2.608845167267825e-05, + "loss": 0.5497, + "step": 7904 + }, + { + "epoch": 5.103292446739832, + "grad_norm": 1.38306843258331, + "learning_rate": 2.6052053774333826e-05, + "loss": 0.4977, + "step": 7905 + }, + { + "epoch": 5.103938024531956, + "grad_norm": 1.7437209207511117, + "learning_rate": 2.60156788696203e-05, + "loss": 0.6186, + "step": 7906 + }, + { + "epoch": 5.10458360232408, + "grad_norm": 1.4106225743169234, + "learning_rate": 2.5979326965285606e-05, + "loss": 0.5177, + "step": 7907 + }, + { + "epoch": 5.105229180116204, + "grad_norm": 1.442968452753664, + "learning_rate": 2.5942998068073355e-05, + "loss": 0.4898, + "step": 7908 + }, + { + "epoch": 5.105874757908328, + "grad_norm": 1.2941975677904203, + "learning_rate": 2.5906692184723055e-05, + "loss": 0.4928, + "step": 7909 + }, + { + "epoch": 5.106520335700452, + "grad_norm": 1.2734795208069516, + "learning_rate": 2.58704093219697e-05, + "loss": 0.4515, + "step": 7910 + }, + { + "epoch": 5.107165913492576, + "grad_norm": 1.403194182787182, + "learning_rate": 2.5834149486544136e-05, + "loss": 0.5866, + "step": 7911 + }, + { + "epoch": 5.1078114912847, + "grad_norm": 1.3573020534169555, + "learning_rate": 2.579791268517307e-05, + "loss": 0.5288, + "step": 7912 + }, + { + "epoch": 5.108457069076824, + "grad_norm": 1.2924334727278064, + "learning_rate": 2.5761698924578694e-05, + "loss": 0.5081, + "step": 7913 + }, + { + "epoch": 5.109102646868948, + "grad_norm": 1.7051197180940452, + "learning_rate": 2.5725508211479012e-05, + "loss": 0.5455, + "step": 7914 + }, + { + "epoch": 5.109748224661072, + "grad_norm": 1.3421865249328728, + "learning_rate": 2.5689340552587907e-05, + "loss": 0.567, + "step": 7915 + }, + { + "epoch": 5.110393802453196, + "grad_norm": 1.5465192276275215, + "learning_rate": 2.5653195954614707e-05, + "loss": 0.5056, + "step": 7916 + }, + { + "epoch": 5.11103938024532, + "grad_norm": 1.3015309024398978, + "learning_rate": 2.5617074424264707e-05, + "loss": 0.5076, + "step": 7917 + }, + { + "epoch": 5.1116849580374435, + "grad_norm": 1.3720024931676258, + "learning_rate": 2.558097596823884e-05, + "loss": 0.5024, + "step": 7918 + }, + { + "epoch": 5.112330535829567, + "grad_norm": 1.593774774483456, + "learning_rate": 2.554490059323363e-05, + "loss": 0.565, + "step": 7919 + }, + { + "epoch": 5.112976113621691, + "grad_norm": 1.2366186298813207, + "learning_rate": 2.5508848305941548e-05, + "loss": 0.4515, + "step": 7920 + }, + { + "epoch": 5.113621691413815, + "grad_norm": 1.3224516987456156, + "learning_rate": 2.547281911305064e-05, + "loss": 0.5329, + "step": 7921 + }, + { + "epoch": 5.114267269205939, + "grad_norm": 1.2454656630211192, + "learning_rate": 2.543681302124463e-05, + "loss": 0.473, + "step": 
7922 + }, + { + "epoch": 5.114912846998063, + "grad_norm": 1.5100719077463758, + "learning_rate": 2.54008300372031e-05, + "loss": 0.5505, + "step": 7923 + }, + { + "epoch": 5.115558424790187, + "grad_norm": 1.5028233713017394, + "learning_rate": 2.5364870167601287e-05, + "loss": 0.7052, + "step": 7924 + }, + { + "epoch": 5.116204002582311, + "grad_norm": 1.352992999129196, + "learning_rate": 2.5328933419109982e-05, + "loss": 0.554, + "step": 7925 + }, + { + "epoch": 5.116849580374435, + "grad_norm": 1.3669391068154693, + "learning_rate": 2.529301979839597e-05, + "loss": 0.5332, + "step": 7926 + }, + { + "epoch": 5.117495158166559, + "grad_norm": 1.4741684566702753, + "learning_rate": 2.525712931212154e-05, + "loss": 0.6245, + "step": 7927 + }, + { + "epoch": 5.118140735958683, + "grad_norm": 1.4501794139821869, + "learning_rate": 2.522126196694476e-05, + "loss": 0.5138, + "step": 7928 + }, + { + "epoch": 5.118786313750807, + "grad_norm": 1.20752054881766, + "learning_rate": 2.5185417769519384e-05, + "loss": 0.4984, + "step": 7929 + }, + { + "epoch": 5.119431891542931, + "grad_norm": 1.2099856347456832, + "learning_rate": 2.5149596726494903e-05, + "loss": 0.4848, + "step": 7930 + }, + { + "epoch": 5.120077469335055, + "grad_norm": 1.2947054200580865, + "learning_rate": 2.511379884451647e-05, + "loss": 0.5217, + "step": 7931 + }, + { + "epoch": 5.1207230471271785, + "grad_norm": 1.206917357974297, + "learning_rate": 2.507802413022497e-05, + "loss": 0.4894, + "step": 7932 + }, + { + "epoch": 5.1213686249193024, + "grad_norm": 1.2570393487185714, + "learning_rate": 2.504227259025695e-05, + "loss": 0.4836, + "step": 7933 + }, + { + "epoch": 5.122014202711426, + "grad_norm": 1.371479238372567, + "learning_rate": 2.5006544231244818e-05, + "loss": 0.5775, + "step": 7934 + }, + { + "epoch": 5.12265978050355, + "grad_norm": 1.3542837053225352, + "learning_rate": 2.497083905981642e-05, + "loss": 0.4786, + "step": 7935 + }, + { + "epoch": 5.123305358295674, + "grad_norm": 3.751678344156416, + "learning_rate": 2.4935157082595447e-05, + "loss": 0.5533, + "step": 7936 + }, + { + "epoch": 5.123950936087798, + "grad_norm": 1.4187567641177763, + "learning_rate": 2.4899498306201383e-05, + "loss": 0.5018, + "step": 7937 + }, + { + "epoch": 5.124596513879922, + "grad_norm": 1.2677716369074876, + "learning_rate": 2.4863862737249183e-05, + "loss": 0.5296, + "step": 7938 + }, + { + "epoch": 5.125242091672046, + "grad_norm": 1.3939011929284608, + "learning_rate": 2.482825038234965e-05, + "loss": 0.5654, + "step": 7939 + }, + { + "epoch": 5.125887669464171, + "grad_norm": 1.3356206463643083, + "learning_rate": 2.4792661248109314e-05, + "loss": 0.5347, + "step": 7940 + }, + { + "epoch": 5.126533247256295, + "grad_norm": 1.5126901945713795, + "learning_rate": 2.4757095341130207e-05, + "loss": 0.6246, + "step": 7941 + }, + { + "epoch": 5.127178825048419, + "grad_norm": 1.2724422463346996, + "learning_rate": 2.472155266801027e-05, + "loss": 0.4742, + "step": 7942 + }, + { + "epoch": 5.127824402840543, + "grad_norm": 1.4061942028486307, + "learning_rate": 2.4686033235343043e-05, + "loss": 0.4711, + "step": 7943 + }, + { + "epoch": 5.128469980632667, + "grad_norm": 1.2382385827623077, + "learning_rate": 2.465053704971766e-05, + "loss": 0.4692, + "step": 7944 + }, + { + "epoch": 5.1291155584247905, + "grad_norm": 1.2193601255477011, + "learning_rate": 2.4615064117719103e-05, + "loss": 0.5133, + "step": 7945 + }, + { + "epoch": 5.129761136216914, + "grad_norm": 1.4615669248636929, + "learning_rate": 2.4579614445928002e-05, 
+ "loss": 0.5862, + "step": 7946 + }, + { + "epoch": 5.130406714009038, + "grad_norm": 1.2523599980243667, + "learning_rate": 2.4544188040920502e-05, + "loss": 0.5161, + "step": 7947 + }, + { + "epoch": 5.131052291801162, + "grad_norm": 1.6853075113509404, + "learning_rate": 2.4508784909268698e-05, + "loss": 0.5358, + "step": 7948 + }, + { + "epoch": 5.131697869593286, + "grad_norm": 1.3361501480481661, + "learning_rate": 2.4473405057540245e-05, + "loss": 0.5316, + "step": 7949 + }, + { + "epoch": 5.13234344738541, + "grad_norm": 1.2003960955594468, + "learning_rate": 2.4438048492298334e-05, + "loss": 0.4987, + "step": 7950 + }, + { + "epoch": 5.132989025177534, + "grad_norm": 1.4190104721252625, + "learning_rate": 2.4402715220102115e-05, + "loss": 0.5282, + "step": 7951 + }, + { + "epoch": 5.133634602969658, + "grad_norm": 1.424503922928602, + "learning_rate": 2.436740524750623e-05, + "loss": 0.6133, + "step": 7952 + }, + { + "epoch": 5.134280180761782, + "grad_norm": 1.4555533586820841, + "learning_rate": 2.4332118581061034e-05, + "loss": 0.5075, + "step": 7953 + }, + { + "epoch": 5.134925758553906, + "grad_norm": 1.3696825533374384, + "learning_rate": 2.429685522731258e-05, + "loss": 0.5561, + "step": 7954 + }, + { + "epoch": 5.13557133634603, + "grad_norm": 1.5634922348297708, + "learning_rate": 2.426161519280258e-05, + "loss": 0.5617, + "step": 7955 + }, + { + "epoch": 5.136216914138154, + "grad_norm": 1.3628248286196745, + "learning_rate": 2.4226398484068443e-05, + "loss": 0.506, + "step": 7956 + }, + { + "epoch": 5.136862491930278, + "grad_norm": 1.3815029681707083, + "learning_rate": 2.4191205107643224e-05, + "loss": 0.5051, + "step": 7957 + }, + { + "epoch": 5.137508069722402, + "grad_norm": 1.5418729106235713, + "learning_rate": 2.4156035070055618e-05, + "loss": 0.549, + "step": 7958 + }, + { + "epoch": 5.1381536475145255, + "grad_norm": 1.756959999474512, + "learning_rate": 2.4120888377830137e-05, + "loss": 0.5741, + "step": 7959 + }, + { + "epoch": 5.1387992253066495, + "grad_norm": 1.331486439680874, + "learning_rate": 2.408576503748677e-05, + "loss": 0.5276, + "step": 7960 + }, + { + "epoch": 5.139444803098773, + "grad_norm": 1.3337291325501068, + "learning_rate": 2.4050665055541235e-05, + "loss": 0.5312, + "step": 7961 + }, + { + "epoch": 5.140090380890897, + "grad_norm": 1.418436578318154, + "learning_rate": 2.4015588438505072e-05, + "loss": 0.5746, + "step": 7962 + }, + { + "epoch": 5.140735958683021, + "grad_norm": 1.3958189313825105, + "learning_rate": 2.3980535192885232e-05, + "loss": 0.5426, + "step": 7963 + }, + { + "epoch": 5.141381536475145, + "grad_norm": 1.5745590756312897, + "learning_rate": 2.3945505325184473e-05, + "loss": 0.5531, + "step": 7964 + }, + { + "epoch": 5.142027114267269, + "grad_norm": 1.4467871633490428, + "learning_rate": 2.3910498841901315e-05, + "loss": 0.5319, + "step": 7965 + }, + { + "epoch": 5.142672692059393, + "grad_norm": 1.6234237451968943, + "learning_rate": 2.387551574952965e-05, + "loss": 0.522, + "step": 7966 + }, + { + "epoch": 5.143318269851517, + "grad_norm": 1.4723648842581838, + "learning_rate": 2.3840556054559316e-05, + "loss": 0.5108, + "step": 7967 + }, + { + "epoch": 5.143963847643641, + "grad_norm": 1.2861300898112515, + "learning_rate": 2.3805619763475732e-05, + "loss": 0.4571, + "step": 7968 + }, + { + "epoch": 5.144609425435765, + "grad_norm": 1.3550614540541364, + "learning_rate": 2.3770706882759803e-05, + "loss": 0.5461, + "step": 7969 + }, + { + "epoch": 5.145255003227889, + "grad_norm": 1.3263985717869449, + 
"learning_rate": 2.3735817418888357e-05, + "loss": 0.5663, + "step": 7970 + }, + { + "epoch": 5.145900581020013, + "grad_norm": 1.389931219631167, + "learning_rate": 2.3700951378333732e-05, + "loss": 0.5426, + "step": 7971 + }, + { + "epoch": 5.146546158812137, + "grad_norm": 1.325981412146945, + "learning_rate": 2.3666108767563868e-05, + "loss": 0.5342, + "step": 7972 + }, + { + "epoch": 5.1471917366042605, + "grad_norm": 1.4082508082351384, + "learning_rate": 2.3631289593042497e-05, + "loss": 0.5125, + "step": 7973 + }, + { + "epoch": 5.1478373143963845, + "grad_norm": 1.3371372428008463, + "learning_rate": 2.3596493861228966e-05, + "loss": 0.6119, + "step": 7974 + }, + { + "epoch": 5.148482892188508, + "grad_norm": 1.4388743190299647, + "learning_rate": 2.3561721578578112e-05, + "loss": 0.5067, + "step": 7975 + }, + { + "epoch": 5.149128469980632, + "grad_norm": 1.3355375215920289, + "learning_rate": 2.3526972751540696e-05, + "loss": 0.4851, + "step": 7976 + }, + { + "epoch": 5.149774047772756, + "grad_norm": 1.3693742945226095, + "learning_rate": 2.3492247386562924e-05, + "loss": 0.492, + "step": 7977 + }, + { + "epoch": 5.15041962556488, + "grad_norm": 1.2470935227998365, + "learning_rate": 2.3457545490086728e-05, + "loss": 0.4707, + "step": 7978 + }, + { + "epoch": 5.151065203357004, + "grad_norm": 1.2189868981546783, + "learning_rate": 2.3422867068549657e-05, + "loss": 0.443, + "step": 7979 + }, + { + "epoch": 5.151710781149128, + "grad_norm": 1.311654372554875, + "learning_rate": 2.3388212128384913e-05, + "loss": 0.5294, + "step": 7980 + }, + { + "epoch": 5.152356358941253, + "grad_norm": 1.746879849323045, + "learning_rate": 2.335358067602136e-05, + "loss": 0.4306, + "step": 7981 + }, + { + "epoch": 5.153001936733377, + "grad_norm": 1.5175669978007098, + "learning_rate": 2.3318972717883487e-05, + "loss": 0.5366, + "step": 7982 + }, + { + "epoch": 5.153647514525501, + "grad_norm": 1.44403750165122, + "learning_rate": 2.3284388260391413e-05, + "loss": 0.5385, + "step": 7983 + }, + { + "epoch": 5.154293092317625, + "grad_norm": 1.3991653472303467, + "learning_rate": 2.3249827309960983e-05, + "loss": 0.5454, + "step": 7984 + }, + { + "epoch": 5.154938670109749, + "grad_norm": 1.2054816144485747, + "learning_rate": 2.321528987300354e-05, + "loss": 0.4811, + "step": 7985 + }, + { + "epoch": 5.1555842479018725, + "grad_norm": 1.3958017802776057, + "learning_rate": 2.3180775955926122e-05, + "loss": 0.5568, + "step": 7986 + }, + { + "epoch": 5.1562298256939965, + "grad_norm": 1.5262828208354615, + "learning_rate": 2.3146285565131527e-05, + "loss": 0.642, + "step": 7987 + }, + { + "epoch": 5.15687540348612, + "grad_norm": 1.453437635337112, + "learning_rate": 2.311181870701797e-05, + "loss": 0.5709, + "step": 7988 + }, + { + "epoch": 5.157520981278244, + "grad_norm": 1.526784672842881, + "learning_rate": 2.307737538797942e-05, + "loss": 0.5208, + "step": 7989 + }, + { + "epoch": 5.158166559070368, + "grad_norm": 2.1100163792547493, + "learning_rate": 2.3042955614405563e-05, + "loss": 0.4604, + "step": 7990 + }, + { + "epoch": 5.158812136862492, + "grad_norm": 1.4394321877415228, + "learning_rate": 2.3008559392681493e-05, + "loss": 0.5375, + "step": 7991 + }, + { + "epoch": 5.159457714654616, + "grad_norm": 1.396610959478822, + "learning_rate": 2.297418672918817e-05, + "loss": 0.5452, + "step": 7992 + }, + { + "epoch": 5.16010329244674, + "grad_norm": 1.38411045020527, + "learning_rate": 2.2939837630302083e-05, + "loss": 0.4839, + "step": 7993 + }, + { + "epoch": 5.160748870238864, + 
"grad_norm": 1.3611286552431467, + "learning_rate": 2.2905512102395247e-05, + "loss": 0.4921, + "step": 7994 + }, + { + "epoch": 5.161394448030988, + "grad_norm": 2.2755743730668545, + "learning_rate": 2.2871210151835483e-05, + "loss": 0.5234, + "step": 7995 + }, + { + "epoch": 5.162040025823112, + "grad_norm": 1.4661871638882105, + "learning_rate": 2.283693178498619e-05, + "loss": 0.5674, + "step": 7996 + }, + { + "epoch": 5.162685603615236, + "grad_norm": 1.3697734528652306, + "learning_rate": 2.2802677008206243e-05, + "loss": 0.495, + "step": 7997 + }, + { + "epoch": 5.16333118140736, + "grad_norm": 1.3210087406562783, + "learning_rate": 2.276844582785036e-05, + "loss": 0.5222, + "step": 7998 + }, + { + "epoch": 5.163976759199484, + "grad_norm": 1.6666589121115873, + "learning_rate": 2.2734238250268783e-05, + "loss": 0.5304, + "step": 7999 + }, + { + "epoch": 5.1646223369916076, + "grad_norm": 1.6718302073694304, + "learning_rate": 2.2700054281807262e-05, + "loss": 0.5987, + "step": 8000 + }, + { + "epoch": 5.1652679147837315, + "grad_norm": 1.487364332885369, + "learning_rate": 2.2665893928807395e-05, + "loss": 0.5616, + "step": 8001 + }, + { + "epoch": 5.165913492575855, + "grad_norm": 1.523031348429179, + "learning_rate": 2.2631757197606242e-05, + "loss": 0.4927, + "step": 8002 + }, + { + "epoch": 5.166559070367979, + "grad_norm": 1.6328283504050534, + "learning_rate": 2.2597644094536526e-05, + "loss": 0.6071, + "step": 8003 + }, + { + "epoch": 5.167204648160103, + "grad_norm": 1.348685358382921, + "learning_rate": 2.2563554625926566e-05, + "loss": 0.5384, + "step": 8004 + }, + { + "epoch": 5.167850225952227, + "grad_norm": 1.4134758670575334, + "learning_rate": 2.252948879810032e-05, + "loss": 0.5691, + "step": 8005 + }, + { + "epoch": 5.168495803744351, + "grad_norm": 1.4407069480903287, + "learning_rate": 2.2495446617377366e-05, + "loss": 0.5717, + "step": 8006 + }, + { + "epoch": 5.169141381536475, + "grad_norm": 1.5210121656593596, + "learning_rate": 2.246142809007287e-05, + "loss": 0.5225, + "step": 8007 + }, + { + "epoch": 5.169786959328599, + "grad_norm": 1.2721537451948128, + "learning_rate": 2.2427433222497583e-05, + "loss": 0.4786, + "step": 8008 + }, + { + "epoch": 5.170432537120723, + "grad_norm": 1.352658046744238, + "learning_rate": 2.2393462020958025e-05, + "loss": 0.4723, + "step": 8009 + }, + { + "epoch": 5.171078114912847, + "grad_norm": 1.384410347467058, + "learning_rate": 2.235951449175609e-05, + "loss": 0.5465, + "step": 8010 + }, + { + "epoch": 5.171723692704971, + "grad_norm": 1.4451167055050547, + "learning_rate": 2.2325590641189394e-05, + "loss": 0.5557, + "step": 8011 + }, + { + "epoch": 5.172369270497095, + "grad_norm": 1.222529076407143, + "learning_rate": 2.229169047555128e-05, + "loss": 0.494, + "step": 8012 + }, + { + "epoch": 5.173014848289219, + "grad_norm": 1.3843650812409924, + "learning_rate": 2.2257814001130457e-05, + "loss": 0.5722, + "step": 8013 + }, + { + "epoch": 5.173660426081343, + "grad_norm": 1.3181925129719163, + "learning_rate": 2.2223961224211396e-05, + "loss": 0.5441, + "step": 8014 + }, + { + "epoch": 5.1743060038734665, + "grad_norm": 1.3234707924446287, + "learning_rate": 2.219013215107422e-05, + "loss": 0.5117, + "step": 8015 + }, + { + "epoch": 5.17495158166559, + "grad_norm": 1.2021497260687584, + "learning_rate": 2.2156326787994427e-05, + "loss": 0.4643, + "step": 8016 + }, + { + "epoch": 5.175597159457714, + "grad_norm": 1.1944987912133884, + "learning_rate": 2.2122545141243388e-05, + "loss": 0.435, + "step": 8017 + }, + { + 
"epoch": 5.176242737249838, + "grad_norm": 1.4851833786251096, + "learning_rate": 2.2088787217087933e-05, + "loss": 0.5531, + "step": 8018 + }, + { + "epoch": 5.176888315041962, + "grad_norm": 1.5286279944736136, + "learning_rate": 2.205505302179041e-05, + "loss": 0.547, + "step": 8019 + }, + { + "epoch": 5.177533892834086, + "grad_norm": 1.6391293609246045, + "learning_rate": 2.2021342561608957e-05, + "loss": 0.4776, + "step": 8020 + }, + { + "epoch": 5.17817947062621, + "grad_norm": 1.3686620083988148, + "learning_rate": 2.1987655842797223e-05, + "loss": 0.4241, + "step": 8021 + }, + { + "epoch": 5.178825048418334, + "grad_norm": 1.4824707047559218, + "learning_rate": 2.1953992871604365e-05, + "loss": 0.5782, + "step": 8022 + }, + { + "epoch": 5.179470626210458, + "grad_norm": 1.366812797226169, + "learning_rate": 2.1920353654275265e-05, + "loss": 0.5379, + "step": 8023 + }, + { + "epoch": 5.180116204002582, + "grad_norm": 1.572300735792842, + "learning_rate": 2.1886738197050346e-05, + "loss": 0.5681, + "step": 8024 + }, + { + "epoch": 5.180761781794706, + "grad_norm": 1.3433283072296722, + "learning_rate": 2.1853146506165624e-05, + "loss": 0.5026, + "step": 8025 + }, + { + "epoch": 5.181407359586831, + "grad_norm": 1.2508227432305317, + "learning_rate": 2.1819578587852697e-05, + "loss": 0.5066, + "step": 8026 + }, + { + "epoch": 5.182052937378955, + "grad_norm": 1.4256020993910303, + "learning_rate": 2.1786034448338768e-05, + "loss": 0.5918, + "step": 8027 + }, + { + "epoch": 5.1826985151710785, + "grad_norm": 1.3988857045209537, + "learning_rate": 2.1752514093846623e-05, + "loss": 0.5779, + "step": 8028 + }, + { + "epoch": 5.183344092963202, + "grad_norm": 1.5497251146822728, + "learning_rate": 2.171901753059464e-05, + "loss": 0.5342, + "step": 8029 + }, + { + "epoch": 5.183989670755326, + "grad_norm": 1.307920454096179, + "learning_rate": 2.1685544764796763e-05, + "loss": 0.4802, + "step": 8030 + }, + { + "epoch": 5.18463524854745, + "grad_norm": 1.4741484949649908, + "learning_rate": 2.1652095802662545e-05, + "loss": 0.5258, + "step": 8031 + }, + { + "epoch": 5.185280826339574, + "grad_norm": 1.4209526293137762, + "learning_rate": 2.161867065039712e-05, + "loss": 0.4921, + "step": 8032 + }, + { + "epoch": 5.185926404131698, + "grad_norm": 1.3769287795412817, + "learning_rate": 2.1585269314201176e-05, + "loss": 0.5228, + "step": 8033 + }, + { + "epoch": 5.186571981923822, + "grad_norm": 1.5227980694381473, + "learning_rate": 2.1551891800271092e-05, + "loss": 0.5872, + "step": 8034 + }, + { + "epoch": 5.187217559715946, + "grad_norm": 1.4278105281803166, + "learning_rate": 2.151853811479865e-05, + "loss": 0.5036, + "step": 8035 + }, + { + "epoch": 5.18786313750807, + "grad_norm": 1.397127338777918, + "learning_rate": 2.14852082639713e-05, + "loss": 0.5113, + "step": 8036 + }, + { + "epoch": 5.188508715300194, + "grad_norm": 1.391992016029055, + "learning_rate": 2.1451902253972176e-05, + "loss": 0.5163, + "step": 8037 + }, + { + "epoch": 5.189154293092318, + "grad_norm": 1.5250258485405463, + "learning_rate": 2.1418620090979805e-05, + "loss": 0.6242, + "step": 8038 + }, + { + "epoch": 5.189799870884442, + "grad_norm": 1.7087584407348921, + "learning_rate": 2.1385361781168347e-05, + "loss": 0.5304, + "step": 8039 + }, + { + "epoch": 5.190445448676566, + "grad_norm": 1.3894207572031936, + "learning_rate": 2.1352127330707686e-05, + "loss": 0.5946, + "step": 8040 + }, + { + "epoch": 5.19109102646869, + "grad_norm": 1.4376112304568076, + "learning_rate": 2.131891674576302e-05, + "loss": 
0.5565, + "step": 8041 + }, + { + "epoch": 5.1917366042608135, + "grad_norm": 1.2611062460912168, + "learning_rate": 2.1285730032495342e-05, + "loss": 0.4491, + "step": 8042 + }, + { + "epoch": 5.1923821820529374, + "grad_norm": 1.2938105596005296, + "learning_rate": 2.1252567197061155e-05, + "loss": 0.4639, + "step": 8043 + }, + { + "epoch": 5.193027759845061, + "grad_norm": 1.1734892227664133, + "learning_rate": 2.121942824561239e-05, + "loss": 0.4401, + "step": 8044 + }, + { + "epoch": 5.193673337637185, + "grad_norm": 1.3155667207088988, + "learning_rate": 2.1186313184296768e-05, + "loss": 0.5524, + "step": 8045 + }, + { + "epoch": 5.194318915429309, + "grad_norm": 1.2849283887175633, + "learning_rate": 2.11532220192575e-05, + "loss": 0.5002, + "step": 8046 + }, + { + "epoch": 5.194964493221433, + "grad_norm": 1.318611217418217, + "learning_rate": 2.1120154756633213e-05, + "loss": 0.4921, + "step": 8047 + }, + { + "epoch": 5.195610071013557, + "grad_norm": 1.4257324419268538, + "learning_rate": 2.108711140255833e-05, + "loss": 0.5262, + "step": 8048 + }, + { + "epoch": 5.196255648805681, + "grad_norm": 1.3995047368677431, + "learning_rate": 2.1054091963162724e-05, + "loss": 0.5428, + "step": 8049 + }, + { + "epoch": 5.196901226597805, + "grad_norm": 1.329786398523794, + "learning_rate": 2.1021096444571815e-05, + "loss": 0.4533, + "step": 8050 + }, + { + "epoch": 5.197546804389929, + "grad_norm": 1.4407768296036907, + "learning_rate": 2.0988124852906652e-05, + "loss": 0.5341, + "step": 8051 + }, + { + "epoch": 5.198192382182053, + "grad_norm": 1.3176337902849964, + "learning_rate": 2.0955177194283767e-05, + "loss": 0.4988, + "step": 8052 + }, + { + "epoch": 5.198837959974177, + "grad_norm": 1.2866115647792915, + "learning_rate": 2.0922253474815326e-05, + "loss": 0.4641, + "step": 8053 + }, + { + "epoch": 5.199483537766301, + "grad_norm": 1.309586351047947, + "learning_rate": 2.088935370060899e-05, + "loss": 0.4928, + "step": 8054 + }, + { + "epoch": 5.200129115558425, + "grad_norm": 1.4312038857844596, + "learning_rate": 2.085647787776803e-05, + "loss": 0.4602, + "step": 8055 + }, + { + "epoch": 5.2007746933505485, + "grad_norm": 1.3253904438104367, + "learning_rate": 2.0823626012391248e-05, + "loss": 0.3743, + "step": 8056 + }, + { + "epoch": 5.2014202711426725, + "grad_norm": 1.4109340683823945, + "learning_rate": 2.0790798110573004e-05, + "loss": 0.5148, + "step": 8057 + }, + { + "epoch": 5.202065848934796, + "grad_norm": 1.4594277583697524, + "learning_rate": 2.075799417840319e-05, + "loss": 0.5285, + "step": 8058 + }, + { + "epoch": 5.20271142672692, + "grad_norm": 1.3051735575724597, + "learning_rate": 2.0725214221967363e-05, + "loss": 0.512, + "step": 8059 + }, + { + "epoch": 5.203357004519044, + "grad_norm": 1.3949231866722145, + "learning_rate": 2.069245824734645e-05, + "loss": 0.5403, + "step": 8060 + }, + { + "epoch": 5.204002582311168, + "grad_norm": 1.3100590353227035, + "learning_rate": 2.0659726260617028e-05, + "loss": 0.4443, + "step": 8061 + }, + { + "epoch": 5.204648160103292, + "grad_norm": 1.435348936941045, + "learning_rate": 2.062701826785132e-05, + "loss": 0.583, + "step": 8062 + }, + { + "epoch": 5.205293737895416, + "grad_norm": 1.8604151554133397, + "learning_rate": 2.059433427511689e-05, + "loss": 0.5112, + "step": 8063 + }, + { + "epoch": 5.20593931568754, + "grad_norm": 1.4651729309922699, + "learning_rate": 2.056167428847696e-05, + "loss": 0.6159, + "step": 8064 + }, + { + "epoch": 5.206584893479664, + "grad_norm": 1.2934168175652534, + "learning_rate": 
2.0529038313990408e-05, + "loss": 0.5116, + "step": 8065 + }, + { + "epoch": 5.207230471271788, + "grad_norm": 1.4469568169703375, + "learning_rate": 2.04964263577114e-05, + "loss": 0.5714, + "step": 8066 + }, + { + "epoch": 5.207876049063912, + "grad_norm": 1.3520097024650948, + "learning_rate": 2.0463838425689905e-05, + "loss": 0.4429, + "step": 8067 + }, + { + "epoch": 5.208521626856037, + "grad_norm": 1.3457671570793854, + "learning_rate": 2.043127452397131e-05, + "loss": 0.4807, + "step": 8068 + }, + { + "epoch": 5.2091672046481605, + "grad_norm": 1.4142855759688862, + "learning_rate": 2.039873465859646e-05, + "loss": 0.5391, + "step": 8069 + }, + { + "epoch": 5.2098127824402845, + "grad_norm": 1.4380908902915306, + "learning_rate": 2.036621883560195e-05, + "loss": 0.6051, + "step": 8070 + }, + { + "epoch": 5.210458360232408, + "grad_norm": 1.4718530173955313, + "learning_rate": 2.0333727061019785e-05, + "loss": 0.5092, + "step": 8071 + }, + { + "epoch": 5.211103938024532, + "grad_norm": 1.4686161634606865, + "learning_rate": 2.0301259340877425e-05, + "loss": 0.5579, + "step": 8072 + }, + { + "epoch": 5.211749515816656, + "grad_norm": 1.3311310997681878, + "learning_rate": 2.0268815681198102e-05, + "loss": 0.5069, + "step": 8073 + }, + { + "epoch": 5.21239509360878, + "grad_norm": 1.315616384888631, + "learning_rate": 2.0236396088000375e-05, + "loss": 0.4617, + "step": 8074 + }, + { + "epoch": 5.213040671400904, + "grad_norm": 1.415650551005366, + "learning_rate": 2.020400056729844e-05, + "loss": 0.5275, + "step": 8075 + }, + { + "epoch": 5.213686249193028, + "grad_norm": 1.3366609228085695, + "learning_rate": 2.0171629125101997e-05, + "loss": 0.487, + "step": 8076 + }, + { + "epoch": 5.214331826985152, + "grad_norm": 1.2578750700495258, + "learning_rate": 2.013928176741629e-05, + "loss": 0.4615, + "step": 8077 + }, + { + "epoch": 5.214977404777276, + "grad_norm": 1.1765085027469597, + "learning_rate": 2.010695850024208e-05, + "loss": 0.4224, + "step": 8078 + }, + { + "epoch": 5.2156229825694, + "grad_norm": 1.5232099574473823, + "learning_rate": 2.0074659329575682e-05, + "loss": 0.5695, + "step": 8079 + }, + { + "epoch": 5.216268560361524, + "grad_norm": 1.6035824667888405, + "learning_rate": 2.0042384261408912e-05, + "loss": 0.666, + "step": 8080 + }, + { + "epoch": 5.216914138153648, + "grad_norm": 1.3813667499228395, + "learning_rate": 2.001013330172913e-05, + "loss": 0.6036, + "step": 8081 + }, + { + "epoch": 5.217559715945772, + "grad_norm": 1.347444780873703, + "learning_rate": 1.9977906456519256e-05, + "loss": 0.56, + "step": 8082 + }, + { + "epoch": 5.2182052937378955, + "grad_norm": 1.3647277995473341, + "learning_rate": 1.9945703731757617e-05, + "loss": 0.5578, + "step": 8083 + }, + { + "epoch": 5.2188508715300195, + "grad_norm": 1.452814100075582, + "learning_rate": 1.991352513341829e-05, + "loss": 0.5226, + "step": 8084 + }, + { + "epoch": 5.219496449322143, + "grad_norm": 1.429637070451671, + "learning_rate": 1.9881370667470647e-05, + "loss": 0.5015, + "step": 8085 + }, + { + "epoch": 5.220142027114267, + "grad_norm": 1.52594635762268, + "learning_rate": 1.9849240339879636e-05, + "loss": 0.5432, + "step": 8086 + }, + { + "epoch": 5.220787604906391, + "grad_norm": 1.3294177238183857, + "learning_rate": 1.9817134156605902e-05, + "loss": 0.5328, + "step": 8087 + }, + { + "epoch": 5.221433182698515, + "grad_norm": 1.4663142925894008, + "learning_rate": 1.9785052123605367e-05, + "loss": 0.5481, + "step": 8088 + }, + { + "epoch": 5.222078760490639, + "grad_norm": 
1.369591243668228, + "learning_rate": 1.9752994246829567e-05, + "loss": 0.5667, + "step": 8089 + }, + { + "epoch": 5.222724338282763, + "grad_norm": 1.3568519726299508, + "learning_rate": 1.97209605322257e-05, + "loss": 0.4705, + "step": 8090 + }, + { + "epoch": 5.223369916074887, + "grad_norm": 1.2874809301748098, + "learning_rate": 1.9688950985736175e-05, + "loss": 0.4174, + "step": 8091 + }, + { + "epoch": 5.224015493867011, + "grad_norm": 1.6515745699103983, + "learning_rate": 1.9656965613299243e-05, + "loss": 0.5714, + "step": 8092 + }, + { + "epoch": 5.224661071659135, + "grad_norm": 1.5466291932931715, + "learning_rate": 1.962500442084849e-05, + "loss": 0.4455, + "step": 8093 + }, + { + "epoch": 5.225306649451259, + "grad_norm": 1.2937468534253396, + "learning_rate": 1.9593067414312956e-05, + "loss": 0.4979, + "step": 8094 + }, + { + "epoch": 5.225952227243383, + "grad_norm": 1.35553252401995, + "learning_rate": 1.956115459961739e-05, + "loss": 0.504, + "step": 8095 + }, + { + "epoch": 5.226597805035507, + "grad_norm": 1.6120080432423038, + "learning_rate": 1.952926598268195e-05, + "loss": 0.5118, + "step": 8096 + }, + { + "epoch": 5.227243382827631, + "grad_norm": 1.4100562121950924, + "learning_rate": 1.949740156942223e-05, + "loss": 0.5444, + "step": 8097 + }, + { + "epoch": 5.2278889606197545, + "grad_norm": 1.5537022857957392, + "learning_rate": 1.9465561365749456e-05, + "loss": 0.4578, + "step": 8098 + }, + { + "epoch": 5.228534538411878, + "grad_norm": 1.3508801428555037, + "learning_rate": 1.9433745377570326e-05, + "loss": 0.5235, + "step": 8099 + }, + { + "epoch": 5.229180116204002, + "grad_norm": 1.2750865065684043, + "learning_rate": 1.9401953610787018e-05, + "loss": 0.5102, + "step": 8100 + }, + { + "epoch": 5.229825693996126, + "grad_norm": 1.3725911546435547, + "learning_rate": 1.937018607129725e-05, + "loss": 0.5165, + "step": 8101 + }, + { + "epoch": 5.23047127178825, + "grad_norm": 1.3784830509815602, + "learning_rate": 1.933844276499422e-05, + "loss": 0.5153, + "step": 8102 + }, + { + "epoch": 5.231116849580374, + "grad_norm": 1.3619158639206013, + "learning_rate": 1.9306723697766642e-05, + "loss": 0.5734, + "step": 8103 + }, + { + "epoch": 5.231762427372498, + "grad_norm": 1.3695935190451431, + "learning_rate": 1.9275028875498717e-05, + "loss": 0.5012, + "step": 8104 + }, + { + "epoch": 5.232408005164622, + "grad_norm": 1.4023540791909548, + "learning_rate": 1.9243358304070196e-05, + "loss": 0.576, + "step": 8105 + }, + { + "epoch": 5.233053582956746, + "grad_norm": 1.3254944825074548, + "learning_rate": 1.921171198935626e-05, + "loss": 0.4416, + "step": 8106 + }, + { + "epoch": 5.23369916074887, + "grad_norm": 1.2034309019920804, + "learning_rate": 1.9180089937227668e-05, + "loss": 0.4594, + "step": 8107 + }, + { + "epoch": 5.234344738540994, + "grad_norm": 1.374601069091489, + "learning_rate": 1.9148492153550586e-05, + "loss": 0.5032, + "step": 8108 + }, + { + "epoch": 5.234990316333118, + "grad_norm": 1.402609993374759, + "learning_rate": 1.9116918644186834e-05, + "loss": 0.5262, + "step": 8109 + }, + { + "epoch": 5.235635894125242, + "grad_norm": 1.401710995524525, + "learning_rate": 1.9085369414993507e-05, + "loss": 0.5027, + "step": 8110 + }, + { + "epoch": 5.236281471917366, + "grad_norm": 1.3783287628938785, + "learning_rate": 1.9053844471823353e-05, + "loss": 0.4664, + "step": 8111 + }, + { + "epoch": 5.23692704970949, + "grad_norm": 1.4938908397906197, + "learning_rate": 1.9022343820524648e-05, + "loss": 0.6322, + "step": 8112 + }, + { + "epoch": 
5.237572627501614, + "grad_norm": 1.3697466783432382, + "learning_rate": 1.8990867466941e-05, + "loss": 0.4986, + "step": 8113 + }, + { + "epoch": 5.238218205293738, + "grad_norm": 1.4179912305657034, + "learning_rate": 1.895941541691159e-05, + "loss": 0.6213, + "step": 8114 + }, + { + "epoch": 5.238863783085862, + "grad_norm": 1.2956182336000366, + "learning_rate": 1.892798767627121e-05, + "loss": 0.4446, + "step": 8115 + }, + { + "epoch": 5.239509360877986, + "grad_norm": 1.4448223082925233, + "learning_rate": 1.889658425084989e-05, + "loss": 0.584, + "step": 8116 + }, + { + "epoch": 5.24015493867011, + "grad_norm": 1.2702931580393984, + "learning_rate": 1.8865205146473393e-05, + "loss": 0.4669, + "step": 8117 + }, + { + "epoch": 5.240800516462234, + "grad_norm": 1.495944383843726, + "learning_rate": 1.883385036896289e-05, + "loss": 0.5748, + "step": 8118 + }, + { + "epoch": 5.241446094254358, + "grad_norm": 1.46436583406572, + "learning_rate": 1.8802519924134884e-05, + "loss": 0.5232, + "step": 8119 + }, + { + "epoch": 5.242091672046482, + "grad_norm": 1.707789433069632, + "learning_rate": 1.8771213817801633e-05, + "loss": 0.7951, + "step": 8120 + }, + { + "epoch": 5.242737249838606, + "grad_norm": 1.5531312012937217, + "learning_rate": 1.8739932055770707e-05, + "loss": 0.6089, + "step": 8121 + }, + { + "epoch": 5.24338282763073, + "grad_norm": 1.4054133116818726, + "learning_rate": 1.8708674643845127e-05, + "loss": 0.5224, + "step": 8122 + }, + { + "epoch": 5.244028405422854, + "grad_norm": 1.3275565122306607, + "learning_rate": 1.8677441587823542e-05, + "loss": 0.5041, + "step": 8123 + }, + { + "epoch": 5.244673983214978, + "grad_norm": 1.3595560735605008, + "learning_rate": 1.86462328935e-05, + "loss": 0.5345, + "step": 8124 + }, + { + "epoch": 5.2453195610071015, + "grad_norm": 1.5018997247635275, + "learning_rate": 1.8615048566664037e-05, + "loss": 0.5292, + "step": 8125 + }, + { + "epoch": 5.245965138799225, + "grad_norm": 1.8182441549751227, + "learning_rate": 1.8583888613100656e-05, + "loss": 0.5997, + "step": 8126 + }, + { + "epoch": 5.246610716591349, + "grad_norm": 1.2567328687299342, + "learning_rate": 1.8552753038590334e-05, + "loss": 0.4905, + "step": 8127 + }, + { + "epoch": 5.247256294383473, + "grad_norm": 1.399988825869428, + "learning_rate": 1.8521641848909086e-05, + "loss": 0.5643, + "step": 8128 + }, + { + "epoch": 5.247901872175597, + "grad_norm": 1.4991879459569044, + "learning_rate": 1.8490555049828342e-05, + "loss": 0.5798, + "step": 8129 + }, + { + "epoch": 5.248547449967721, + "grad_norm": 1.3937659882266973, + "learning_rate": 1.845949264711501e-05, + "loss": 0.4934, + "step": 8130 + }, + { + "epoch": 5.249193027759845, + "grad_norm": 1.5480276865863922, + "learning_rate": 1.842845464653151e-05, + "loss": 0.5503, + "step": 8131 + }, + { + "epoch": 5.249838605551969, + "grad_norm": 1.2866691566829926, + "learning_rate": 1.8397441053835703e-05, + "loss": 0.4986, + "step": 8132 + }, + { + "epoch": 5.250484183344093, + "grad_norm": 1.3869820550516467, + "learning_rate": 1.8366451874780907e-05, + "loss": 0.5201, + "step": 8133 + }, + { + "epoch": 5.251129761136217, + "grad_norm": 1.2976954585340756, + "learning_rate": 1.833548711511602e-05, + "loss": 0.4991, + "step": 8134 + }, + { + "epoch": 5.251775338928341, + "grad_norm": 1.3085521957806756, + "learning_rate": 1.830454678058525e-05, + "loss": 0.4873, + "step": 8135 + }, + { + "epoch": 5.252420916720465, + "grad_norm": 1.418984579312986, + "learning_rate": 1.8273630876928334e-05, + "loss": 0.6087, + "step": 
8136 + }, + { + "epoch": 5.253066494512589, + "grad_norm": 1.257300711820846, + "learning_rate": 1.8242739409880592e-05, + "loss": 0.4701, + "step": 8137 + }, + { + "epoch": 5.253712072304713, + "grad_norm": 1.488438762076898, + "learning_rate": 1.8211872385172614e-05, + "loss": 0.6337, + "step": 8138 + }, + { + "epoch": 5.2543576500968365, + "grad_norm": 1.3035792976922218, + "learning_rate": 1.818102980853056e-05, + "loss": 0.4939, + "step": 8139 + }, + { + "epoch": 5.2550032278889605, + "grad_norm": 1.3342516374607523, + "learning_rate": 1.8150211685676137e-05, + "loss": 0.5017, + "step": 8140 + }, + { + "epoch": 5.255648805681084, + "grad_norm": 1.3450324288339046, + "learning_rate": 1.8119418022326315e-05, + "loss": 0.5109, + "step": 8141 + }, + { + "epoch": 5.256294383473208, + "grad_norm": 1.274155980360063, + "learning_rate": 1.8088648824193713e-05, + "loss": 0.5011, + "step": 8142 + }, + { + "epoch": 5.256939961265332, + "grad_norm": 1.2640295452219183, + "learning_rate": 1.8057904096986346e-05, + "loss": 0.4974, + "step": 8143 + }, + { + "epoch": 5.257585539057456, + "grad_norm": 1.3906615720660558, + "learning_rate": 1.8027183846407606e-05, + "loss": 0.5305, + "step": 8144 + }, + { + "epoch": 5.25823111684958, + "grad_norm": 1.3968909620891878, + "learning_rate": 1.7996488078156462e-05, + "loss": 0.4948, + "step": 8145 + }, + { + "epoch": 5.258876694641704, + "grad_norm": 1.3716636274927008, + "learning_rate": 1.7965816797927348e-05, + "loss": 0.5319, + "step": 8146 + }, + { + "epoch": 5.259522272433828, + "grad_norm": 1.4152451110289752, + "learning_rate": 1.793517001140999e-05, + "loss": 0.5762, + "step": 8147 + }, + { + "epoch": 5.260167850225952, + "grad_norm": 3.6449306082854225, + "learning_rate": 1.7904547724289775e-05, + "loss": 0.5269, + "step": 8148 + }, + { + "epoch": 5.260813428018076, + "grad_norm": 1.426501620894557, + "learning_rate": 1.787394994224743e-05, + "loss": 0.5326, + "step": 8149 + }, + { + "epoch": 5.2614590058102, + "grad_norm": 1.3714338998139977, + "learning_rate": 1.784337667095914e-05, + "loss": 0.5009, + "step": 8150 + }, + { + "epoch": 5.262104583602324, + "grad_norm": 1.3952234219177961, + "learning_rate": 1.7812827916096613e-05, + "loss": 0.4386, + "step": 8151 + }, + { + "epoch": 5.262750161394448, + "grad_norm": 1.3707245095196259, + "learning_rate": 1.778230368332692e-05, + "loss": 0.5511, + "step": 8152 + }, + { + "epoch": 5.263395739186572, + "grad_norm": 1.592239895196913, + "learning_rate": 1.775180397831264e-05, + "loss": 0.4795, + "step": 8153 + }, + { + "epoch": 5.264041316978696, + "grad_norm": 1.6404060472024298, + "learning_rate": 1.7721328806711776e-05, + "loss": 0.505, + "step": 8154 + }, + { + "epoch": 5.26468689477082, + "grad_norm": 1.316392129093194, + "learning_rate": 1.769087817417779e-05, + "loss": 0.5062, + "step": 8155 + }, + { + "epoch": 5.265332472562944, + "grad_norm": 1.2621164904377427, + "learning_rate": 1.7660452086359583e-05, + "loss": 0.4969, + "step": 8156 + }, + { + "epoch": 5.265978050355068, + "grad_norm": 1.3121681209430434, + "learning_rate": 1.7630050548901537e-05, + "loss": 0.4442, + "step": 8157 + }, + { + "epoch": 5.266623628147192, + "grad_norm": 1.1717922861088865, + "learning_rate": 1.7599673567443407e-05, + "loss": 0.3978, + "step": 8158 + }, + { + "epoch": 5.267269205939316, + "grad_norm": 1.4033396562094118, + "learning_rate": 1.7569321147620518e-05, + "loss": 0.5239, + "step": 8159 + }, + { + "epoch": 5.26791478373144, + "grad_norm": 1.3250022875383844, + "learning_rate": 1.753899329506347e-05, 
+ "loss": 0.4908, + "step": 8160 + }, + { + "epoch": 5.268560361523564, + "grad_norm": 1.2775023159895365, + "learning_rate": 1.7508690015398425e-05, + "loss": 0.4774, + "step": 8161 + }, + { + "epoch": 5.269205939315688, + "grad_norm": 1.4205327559212817, + "learning_rate": 1.7478411314247003e-05, + "loss": 0.5704, + "step": 8162 + }, + { + "epoch": 5.269851517107812, + "grad_norm": 1.7444035179491504, + "learning_rate": 1.7448157197226166e-05, + "loss": 0.5305, + "step": 8163 + }, + { + "epoch": 5.270497094899936, + "grad_norm": 3.395919406360991, + "learning_rate": 1.7417927669948335e-05, + "loss": 0.5571, + "step": 8164 + }, + { + "epoch": 5.27114267269206, + "grad_norm": 1.9243193991651444, + "learning_rate": 1.7387722738021508e-05, + "loss": 0.4554, + "step": 8165 + }, + { + "epoch": 5.2717882504841835, + "grad_norm": 1.3205875629552424, + "learning_rate": 1.7357542407048892e-05, + "loss": 0.5437, + "step": 8166 + }, + { + "epoch": 5.2724338282763075, + "grad_norm": 1.3054269234423492, + "learning_rate": 1.7327386682629335e-05, + "loss": 0.4674, + "step": 8167 + }, + { + "epoch": 5.273079406068431, + "grad_norm": 4.09038231865245, + "learning_rate": 1.7297255570357067e-05, + "loss": 0.4903, + "step": 8168 + }, + { + "epoch": 5.273724983860555, + "grad_norm": 1.3066502935784179, + "learning_rate": 1.726714907582158e-05, + "loss": 0.4441, + "step": 8169 + }, + { + "epoch": 5.274370561652679, + "grad_norm": 1.5920246611508984, + "learning_rate": 1.7237067204608074e-05, + "loss": 0.5294, + "step": 8170 + }, + { + "epoch": 5.275016139444803, + "grad_norm": 1.3302028805340476, + "learning_rate": 1.720700996229702e-05, + "loss": 0.4675, + "step": 8171 + }, + { + "epoch": 5.275661717236927, + "grad_norm": 1.4378983908454477, + "learning_rate": 1.7176977354464273e-05, + "loss": 0.5175, + "step": 8172 + }, + { + "epoch": 5.276307295029051, + "grad_norm": 1.5052510664150633, + "learning_rate": 1.7146969386681293e-05, + "loss": 0.4756, + "step": 8173 + }, + { + "epoch": 5.276952872821175, + "grad_norm": 1.3935549676043693, + "learning_rate": 1.7116986064514828e-05, + "loss": 0.565, + "step": 8174 + }, + { + "epoch": 5.277598450613299, + "grad_norm": 1.4080884991980738, + "learning_rate": 1.708702739352709e-05, + "loss": 0.5348, + "step": 8175 + }, + { + "epoch": 5.278244028405423, + "grad_norm": 1.350967958558371, + "learning_rate": 1.7057093379275727e-05, + "loss": 0.492, + "step": 8176 + }, + { + "epoch": 5.278889606197547, + "grad_norm": 1.4162712419849308, + "learning_rate": 1.7027184027313822e-05, + "loss": 0.5044, + "step": 8177 + }, + { + "epoch": 5.279535183989671, + "grad_norm": 1.2911238556119595, + "learning_rate": 1.6997299343189857e-05, + "loss": 0.4874, + "step": 8178 + }, + { + "epoch": 5.280180761781795, + "grad_norm": 1.3116553419082422, + "learning_rate": 1.6967439332447763e-05, + "loss": 0.4732, + "step": 8179 + }, + { + "epoch": 5.280826339573919, + "grad_norm": 1.281473919976096, + "learning_rate": 1.6937604000626858e-05, + "loss": 0.464, + "step": 8180 + }, + { + "epoch": 5.2814719173660425, + "grad_norm": 1.4299481846813833, + "learning_rate": 1.6907793353261933e-05, + "loss": 0.5676, + "step": 8181 + }, + { + "epoch": 5.282117495158166, + "grad_norm": 1.4025643806665782, + "learning_rate": 1.6878007395883165e-05, + "loss": 0.4876, + "step": 8182 + }, + { + "epoch": 5.28276307295029, + "grad_norm": 1.3988742551494002, + "learning_rate": 1.6848246134016115e-05, + "loss": 0.4522, + "step": 8183 + }, + { + "epoch": 5.283408650742414, + "grad_norm": 1.4003763490867966, + 
"learning_rate": 1.68185095731819e-05, + "loss": 0.4889, + "step": 8184 + }, + { + "epoch": 5.284054228534538, + "grad_norm": 1.391093520866526, + "learning_rate": 1.678879771889689e-05, + "loss": 0.5309, + "step": 8185 + }, + { + "epoch": 5.284699806326662, + "grad_norm": 1.236783008998457, + "learning_rate": 1.6759110576672913e-05, + "loss": 0.4024, + "step": 8186 + }, + { + "epoch": 5.285345384118786, + "grad_norm": 1.2975219598520897, + "learning_rate": 1.672944815201736e-05, + "loss": 0.4893, + "step": 8187 + }, + { + "epoch": 5.28599096191091, + "grad_norm": 1.3889166135649398, + "learning_rate": 1.6699810450432815e-05, + "loss": 0.5098, + "step": 8188 + }, + { + "epoch": 5.286636539703034, + "grad_norm": 1.4009689834053718, + "learning_rate": 1.6670197477417395e-05, + "loss": 0.5062, + "step": 8189 + }, + { + "epoch": 5.287282117495158, + "grad_norm": 1.475330114615466, + "learning_rate": 1.6640609238464677e-05, + "loss": 0.4454, + "step": 8190 + }, + { + "epoch": 5.287927695287282, + "grad_norm": 1.3386350146699884, + "learning_rate": 1.6611045739063477e-05, + "loss": 0.4435, + "step": 8191 + }, + { + "epoch": 5.288573273079406, + "grad_norm": 1.463868874601551, + "learning_rate": 1.6581506984698217e-05, + "loss": 0.5571, + "step": 8192 + }, + { + "epoch": 5.28921885087153, + "grad_norm": 1.3049666346437294, + "learning_rate": 1.655199298084866e-05, + "loss": 0.5276, + "step": 8193 + }, + { + "epoch": 5.289864428663654, + "grad_norm": 1.314545245578642, + "learning_rate": 1.6522503732989862e-05, + "loss": 0.5246, + "step": 8194 + }, + { + "epoch": 5.2905100064557775, + "grad_norm": 1.601946892812966, + "learning_rate": 1.6493039246592476e-05, + "loss": 0.5629, + "step": 8195 + }, + { + "epoch": 5.2911555842479014, + "grad_norm": 1.3287997108797844, + "learning_rate": 1.646359952712245e-05, + "loss": 0.4556, + "step": 8196 + }, + { + "epoch": 5.291801162040025, + "grad_norm": 1.5125735756709806, + "learning_rate": 1.64341845800411e-05, + "loss": 0.5533, + "step": 8197 + }, + { + "epoch": 5.292446739832149, + "grad_norm": 1.4175049547646017, + "learning_rate": 1.6404794410805273e-05, + "loss": 0.5705, + "step": 8198 + }, + { + "epoch": 5.293092317624274, + "grad_norm": 1.40212664232791, + "learning_rate": 1.6375429024867127e-05, + "loss": 0.4514, + "step": 8199 + }, + { + "epoch": 5.293737895416398, + "grad_norm": 1.2837656721882742, + "learning_rate": 1.6346088427674236e-05, + "loss": 0.5109, + "step": 8200 + }, + { + "epoch": 5.294383473208522, + "grad_norm": 1.3377842986478719, + "learning_rate": 1.63167726246696e-05, + "loss": 0.4613, + "step": 8201 + }, + { + "epoch": 5.295029051000646, + "grad_norm": 1.1957399754628586, + "learning_rate": 1.6287481621291597e-05, + "loss": 0.4493, + "step": 8202 + }, + { + "epoch": 5.29567462879277, + "grad_norm": 1.4073121118938134, + "learning_rate": 1.6258215422974004e-05, + "loss": 0.5498, + "step": 8203 + }, + { + "epoch": 5.296320206584894, + "grad_norm": 1.3819969588374403, + "learning_rate": 1.6228974035146035e-05, + "loss": 0.5335, + "step": 8204 + }, + { + "epoch": 5.296965784377018, + "grad_norm": 1.4523448520871114, + "learning_rate": 1.6199757463232242e-05, + "loss": 0.5342, + "step": 8205 + }, + { + "epoch": 5.297611362169142, + "grad_norm": 1.4176747172718687, + "learning_rate": 1.6170565712652605e-05, + "loss": 0.5355, + "step": 8206 + }, + { + "epoch": 5.298256939961266, + "grad_norm": 1.4285625050964441, + "learning_rate": 1.6141398788822508e-05, + "loss": 0.569, + "step": 8207 + }, + { + "epoch": 5.2989025177533895, + 
"grad_norm": 1.3989961121961805, + "learning_rate": 1.6112256697152688e-05, + "loss": 0.5258, + "step": 8208 + }, + { + "epoch": 5.299548095545513, + "grad_norm": 1.483347519431375, + "learning_rate": 1.6083139443049387e-05, + "loss": 0.5886, + "step": 8209 + }, + { + "epoch": 5.300193673337637, + "grad_norm": 1.454005425631991, + "learning_rate": 1.6054047031914068e-05, + "loss": 0.5132, + "step": 8210 + }, + { + "epoch": 5.300839251129761, + "grad_norm": 1.2247303859359862, + "learning_rate": 1.6024979469143685e-05, + "loss": 0.4469, + "step": 8211 + }, + { + "epoch": 5.301484828921885, + "grad_norm": 1.3036660973090313, + "learning_rate": 1.5995936760130652e-05, + "loss": 0.4794, + "step": 8212 + }, + { + "epoch": 5.302130406714009, + "grad_norm": 1.64077147406807, + "learning_rate": 1.5966918910262578e-05, + "loss": 0.4889, + "step": 8213 + }, + { + "epoch": 5.302775984506133, + "grad_norm": 1.5209382697760434, + "learning_rate": 1.5937925924922655e-05, + "loss": 0.5332, + "step": 8214 + }, + { + "epoch": 5.303421562298257, + "grad_norm": 1.3366123380207324, + "learning_rate": 1.5908957809489403e-05, + "loss": 0.4532, + "step": 8215 + }, + { + "epoch": 5.304067140090381, + "grad_norm": 1.310485360051688, + "learning_rate": 1.588001456933662e-05, + "loss": 0.4816, + "step": 8216 + }, + { + "epoch": 5.304712717882505, + "grad_norm": 1.2404284275703956, + "learning_rate": 1.5851096209833643e-05, + "loss": 0.4498, + "step": 8217 + }, + { + "epoch": 5.305358295674629, + "grad_norm": 1.530143923009476, + "learning_rate": 1.582220273634513e-05, + "loss": 0.4792, + "step": 8218 + }, + { + "epoch": 5.306003873466753, + "grad_norm": 1.3236764880054606, + "learning_rate": 1.5793334154231062e-05, + "loss": 0.5051, + "step": 8219 + }, + { + "epoch": 5.306649451258877, + "grad_norm": 1.729620247077421, + "learning_rate": 1.5764490468846906e-05, + "loss": 0.5048, + "step": 8220 + }, + { + "epoch": 5.307295029051001, + "grad_norm": 1.5755497166833514, + "learning_rate": 1.5735671685543517e-05, + "loss": 0.4933, + "step": 8221 + }, + { + "epoch": 5.3079406068431245, + "grad_norm": 1.6724369237527958, + "learning_rate": 1.5706877809666947e-05, + "loss": 0.5093, + "step": 8222 + }, + { + "epoch": 5.3085861846352485, + "grad_norm": 1.251424314833382, + "learning_rate": 1.5678108846558856e-05, + "loss": 0.5174, + "step": 8223 + }, + { + "epoch": 5.309231762427372, + "grad_norm": 1.3423392341267237, + "learning_rate": 1.5649364801556163e-05, + "loss": 0.5952, + "step": 8224 + }, + { + "epoch": 5.309877340219496, + "grad_norm": 1.3379146572672946, + "learning_rate": 1.5620645679991183e-05, + "loss": 0.4879, + "step": 8225 + }, + { + "epoch": 5.31052291801162, + "grad_norm": 1.3293393415590222, + "learning_rate": 1.559195148719162e-05, + "loss": 0.4679, + "step": 8226 + }, + { + "epoch": 5.311168495803744, + "grad_norm": 1.331421896761538, + "learning_rate": 1.556328222848054e-05, + "loss": 0.4801, + "step": 8227 + }, + { + "epoch": 5.311814073595868, + "grad_norm": 1.5754868182962751, + "learning_rate": 1.553463790917638e-05, + "loss": 0.6416, + "step": 8228 + }, + { + "epoch": 5.312459651387992, + "grad_norm": 1.2701351739167754, + "learning_rate": 1.5506018534592972e-05, + "loss": 0.4773, + "step": 8229 + }, + { + "epoch": 5.313105229180116, + "grad_norm": 1.2895678766159062, + "learning_rate": 1.5477424110039494e-05, + "loss": 0.4686, + "step": 8230 + }, + { + "epoch": 5.31375080697224, + "grad_norm": 1.5014930668664679, + "learning_rate": 1.5448854640820528e-05, + "loss": 0.5968, + "step": 8231 + }, + { 
+ "epoch": 5.314396384764364, + "grad_norm": 1.4779986624975998, + "learning_rate": 1.5420310132235974e-05, + "loss": 0.5715, + "step": 8232 + }, + { + "epoch": 5.315041962556488, + "grad_norm": 1.3543066647589448, + "learning_rate": 1.5391790589581142e-05, + "loss": 0.4832, + "step": 8233 + }, + { + "epoch": 5.315687540348612, + "grad_norm": 1.3712112869224, + "learning_rate": 1.5363296018146754e-05, + "loss": 0.4999, + "step": 8234 + }, + { + "epoch": 5.316333118140736, + "grad_norm": 1.2942783168836556, + "learning_rate": 1.5334826423218794e-05, + "loss": 0.5274, + "step": 8235 + }, + { + "epoch": 5.3169786959328595, + "grad_norm": 1.3954382992956735, + "learning_rate": 1.5306381810078656e-05, + "loss": 0.4584, + "step": 8236 + }, + { + "epoch": 5.3176242737249835, + "grad_norm": 1.338565639146783, + "learning_rate": 1.52779621840032e-05, + "loss": 0.4683, + "step": 8237 + }, + { + "epoch": 5.318269851517107, + "grad_norm": 1.2470631905090976, + "learning_rate": 1.5249567550264446e-05, + "loss": 0.463, + "step": 8238 + }, + { + "epoch": 5.318915429309232, + "grad_norm": 1.3407272531167143, + "learning_rate": 1.5221197914129962e-05, + "loss": 0.5277, + "step": 8239 + }, + { + "epoch": 5.319561007101356, + "grad_norm": 1.3114149054482522, + "learning_rate": 1.5192853280862638e-05, + "loss": 0.4817, + "step": 8240 + }, + { + "epoch": 5.32020658489348, + "grad_norm": 1.6506746457561337, + "learning_rate": 1.51645336557206e-05, + "loss": 0.4721, + "step": 8241 + }, + { + "epoch": 5.320852162685604, + "grad_norm": 1.380002838511613, + "learning_rate": 1.5136239043957499e-05, + "loss": 0.475, + "step": 8242 + }, + { + "epoch": 5.321497740477728, + "grad_norm": 1.3765298957620222, + "learning_rate": 1.5107969450822316e-05, + "loss": 0.4325, + "step": 8243 + }, + { + "epoch": 5.322143318269852, + "grad_norm": 1.2372674012607061, + "learning_rate": 1.5079724881559241e-05, + "loss": 0.467, + "step": 8244 + }, + { + "epoch": 5.322788896061976, + "grad_norm": 2.3784220684361292, + "learning_rate": 1.5051505341408032e-05, + "loss": 0.5426, + "step": 8245 + }, + { + "epoch": 5.3234344738541, + "grad_norm": 1.537314925618004, + "learning_rate": 1.5023310835603703e-05, + "loss": 0.5408, + "step": 8246 + }, + { + "epoch": 5.324080051646224, + "grad_norm": 1.3917730085251727, + "learning_rate": 1.499514136937655e-05, + "loss": 0.5272, + "step": 8247 + }, + { + "epoch": 5.324725629438348, + "grad_norm": 1.589361649776454, + "learning_rate": 1.4966996947952364e-05, + "loss": 0.4379, + "step": 8248 + }, + { + "epoch": 5.3253712072304715, + "grad_norm": 1.4227985233720997, + "learning_rate": 1.4938877576552233e-05, + "loss": 0.4884, + "step": 8249 + }, + { + "epoch": 5.3260167850225955, + "grad_norm": 1.438244803700216, + "learning_rate": 1.4910783260392551e-05, + "loss": 0.4533, + "step": 8250 + }, + { + "epoch": 5.326662362814719, + "grad_norm": 1.340511374602047, + "learning_rate": 1.4882714004685148e-05, + "loss": 0.5179, + "step": 8251 + }, + { + "epoch": 5.327307940606843, + "grad_norm": 1.486267732029835, + "learning_rate": 1.4854669814637143e-05, + "loss": 0.5398, + "step": 8252 + }, + { + "epoch": 5.327953518398967, + "grad_norm": 1.3705563056367887, + "learning_rate": 1.4826650695451009e-05, + "loss": 0.4681, + "step": 8253 + }, + { + "epoch": 5.328599096191091, + "grad_norm": 1.4565434663483143, + "learning_rate": 1.4798656652324604e-05, + "loss": 0.516, + "step": 8254 + }, + { + "epoch": 5.329244673983215, + "grad_norm": 1.3047528917546944, + "learning_rate": 1.4770687690451121e-05, + "loss": 
0.4802, + "step": 8255 + }, + { + "epoch": 5.329890251775339, + "grad_norm": 1.4016845597311651, + "learning_rate": 1.4742743815019064e-05, + "loss": 0.5756, + "step": 8256 + }, + { + "epoch": 5.330535829567463, + "grad_norm": 1.3955840436630464, + "learning_rate": 1.4714825031212334e-05, + "loss": 0.5625, + "step": 8257 + }, + { + "epoch": 5.331181407359587, + "grad_norm": 1.4318899032237709, + "learning_rate": 1.4686931344210124e-05, + "loss": 0.508, + "step": 8258 + }, + { + "epoch": 5.331826985151711, + "grad_norm": 1.4175799195830314, + "learning_rate": 1.465906275918709e-05, + "loss": 0.5345, + "step": 8259 + }, + { + "epoch": 5.332472562943835, + "grad_norm": 1.4200111326774443, + "learning_rate": 1.463121928131305e-05, + "loss": 0.491, + "step": 8260 + }, + { + "epoch": 5.333118140735959, + "grad_norm": 1.2638823297780384, + "learning_rate": 1.4603400915753255e-05, + "loss": 0.4158, + "step": 8261 + }, + { + "epoch": 5.333763718528083, + "grad_norm": 1.3053926834034701, + "learning_rate": 1.4575607667668393e-05, + "loss": 0.5179, + "step": 8262 + }, + { + "epoch": 5.3344092963202066, + "grad_norm": 1.5485143865383226, + "learning_rate": 1.454783954221429e-05, + "loss": 0.5625, + "step": 8263 + }, + { + "epoch": 5.3350548741123305, + "grad_norm": 1.3208554511379849, + "learning_rate": 1.4520096544542292e-05, + "loss": 0.4763, + "step": 8264 + }, + { + "epoch": 5.335700451904454, + "grad_norm": 1.4484151600397062, + "learning_rate": 1.4492378679799033e-05, + "loss": 0.5864, + "step": 8265 + }, + { + "epoch": 5.336346029696578, + "grad_norm": 1.3609662839390022, + "learning_rate": 1.4464685953126381e-05, + "loss": 0.5213, + "step": 8266 + }, + { + "epoch": 5.336991607488702, + "grad_norm": 1.4093312628690498, + "learning_rate": 1.4437018369661678e-05, + "loss": 0.4931, + "step": 8267 + }, + { + "epoch": 5.337637185280826, + "grad_norm": 1.5222169153224316, + "learning_rate": 1.4409375934537586e-05, + "loss": 0.5419, + "step": 8268 + }, + { + "epoch": 5.33828276307295, + "grad_norm": 1.3798235700204884, + "learning_rate": 1.4381758652881952e-05, + "loss": 0.5184, + "step": 8269 + }, + { + "epoch": 5.338928340865074, + "grad_norm": 1.5704139906891361, + "learning_rate": 1.4354166529818162e-05, + "loss": 0.4882, + "step": 8270 + }, + { + "epoch": 5.339573918657198, + "grad_norm": 1.3391740817362505, + "learning_rate": 1.4326599570464853e-05, + "loss": 0.5115, + "step": 8271 + }, + { + "epoch": 5.340219496449322, + "grad_norm": 1.3771275567754566, + "learning_rate": 1.4299057779935885e-05, + "loss": 0.5043, + "step": 8272 + }, + { + "epoch": 5.340865074241446, + "grad_norm": 1.3390391531699626, + "learning_rate": 1.4271541163340622e-05, + "loss": 0.5013, + "step": 8273 + }, + { + "epoch": 5.34151065203357, + "grad_norm": 1.369489944268876, + "learning_rate": 1.4244049725783662e-05, + "loss": 0.4359, + "step": 8274 + }, + { + "epoch": 5.342156229825694, + "grad_norm": 1.340149361687464, + "learning_rate": 1.4216583472364963e-05, + "loss": 0.4521, + "step": 8275 + }, + { + "epoch": 5.342801807617818, + "grad_norm": 1.2374347616827308, + "learning_rate": 1.4189142408179793e-05, + "loss": 0.3829, + "step": 8276 + }, + { + "epoch": 5.343447385409942, + "grad_norm": 1.270572005477728, + "learning_rate": 1.4161726538318752e-05, + "loss": 0.4643, + "step": 8277 + }, + { + "epoch": 5.3440929632020655, + "grad_norm": 1.32190602276509, + "learning_rate": 1.413433586786777e-05, + "loss": 0.4886, + "step": 8278 + }, + { + "epoch": 5.344738540994189, + "grad_norm": 1.3507633227216016, + 
"learning_rate": 1.4106970401908096e-05, + "loss": 0.4968, + "step": 8279 + }, + { + "epoch": 5.345384118786313, + "grad_norm": 1.467163712466945, + "learning_rate": 1.4079630145516307e-05, + "loss": 0.5697, + "step": 8280 + }, + { + "epoch": 5.346029696578437, + "grad_norm": 1.3117957610679163, + "learning_rate": 1.4052315103764328e-05, + "loss": 0.4366, + "step": 8281 + }, + { + "epoch": 5.346675274370561, + "grad_norm": 1.512494989765302, + "learning_rate": 1.4025025281719354e-05, + "loss": 0.4884, + "step": 8282 + }, + { + "epoch": 5.347320852162685, + "grad_norm": 1.3856130884561715, + "learning_rate": 1.3997760684443903e-05, + "loss": 0.4281, + "step": 8283 + }, + { + "epoch": 5.347966429954809, + "grad_norm": 1.4989812764566892, + "learning_rate": 1.3970521316995947e-05, + "loss": 0.4845, + "step": 8284 + }, + { + "epoch": 5.348612007746934, + "grad_norm": 1.1746233266319754, + "learning_rate": 1.3943307184428576e-05, + "loss": 0.3732, + "step": 8285 + }, + { + "epoch": 5.349257585539058, + "grad_norm": 1.3459328740243255, + "learning_rate": 1.3916118291790285e-05, + "loss": 0.4848, + "step": 8286 + }, + { + "epoch": 5.349903163331182, + "grad_norm": 1.5142696100261412, + "learning_rate": 1.388895464412499e-05, + "loss": 0.5489, + "step": 8287 + }, + { + "epoch": 5.350548741123306, + "grad_norm": 1.328087829772906, + "learning_rate": 1.3861816246471724e-05, + "loss": 0.4867, + "step": 8288 + }, + { + "epoch": 5.35119431891543, + "grad_norm": 1.3686408321011263, + "learning_rate": 1.3834703103864997e-05, + "loss": 0.489, + "step": 8289 + }, + { + "epoch": 5.351839896707554, + "grad_norm": 1.2176468686145023, + "learning_rate": 1.3807615221334618e-05, + "loss": 0.4375, + "step": 8290 + }, + { + "epoch": 5.3524854744996775, + "grad_norm": 1.4744478241799837, + "learning_rate": 1.3780552603905565e-05, + "loss": 0.518, + "step": 8291 + }, + { + "epoch": 5.353131052291801, + "grad_norm": 1.4240548576447218, + "learning_rate": 1.3753515256598307e-05, + "loss": 0.5193, + "step": 8292 + }, + { + "epoch": 5.353776630083925, + "grad_norm": 1.4701271108279237, + "learning_rate": 1.3726503184428583e-05, + "loss": 0.5301, + "step": 8293 + }, + { + "epoch": 5.354422207876049, + "grad_norm": 1.3717396925782268, + "learning_rate": 1.36995163924073e-05, + "loss": 0.4939, + "step": 8294 + }, + { + "epoch": 5.355067785668173, + "grad_norm": 1.3947865235625985, + "learning_rate": 1.3672554885540904e-05, + "loss": 0.4906, + "step": 8295 + }, + { + "epoch": 5.355713363460297, + "grad_norm": 1.4509273900737305, + "learning_rate": 1.3645618668830992e-05, + "loss": 0.5817, + "step": 8296 + }, + { + "epoch": 5.356358941252421, + "grad_norm": 1.4389645416403565, + "learning_rate": 1.3618707747274472e-05, + "loss": 0.4995, + "step": 8297 + }, + { + "epoch": 5.357004519044545, + "grad_norm": 1.333508132762975, + "learning_rate": 1.3591822125863644e-05, + "loss": 0.4913, + "step": 8298 + }, + { + "epoch": 5.357650096836669, + "grad_norm": 1.2269817770410223, + "learning_rate": 1.3564961809586056e-05, + "loss": 0.4524, + "step": 8299 + }, + { + "epoch": 5.358295674628793, + "grad_norm": 1.6367995531404165, + "learning_rate": 1.3538126803424587e-05, + "loss": 0.6254, + "step": 8300 + }, + { + "epoch": 5.358941252420917, + "grad_norm": 1.3696824571358386, + "learning_rate": 1.3511317112357406e-05, + "loss": 0.4921, + "step": 8301 + }, + { + "epoch": 5.359586830213041, + "grad_norm": 1.533084681889731, + "learning_rate": 1.3484532741357984e-05, + "loss": 0.5487, + "step": 8302 + }, + { + "epoch": 5.360232408005165, + 
"grad_norm": 1.2654917690011582, + "learning_rate": 1.3457773695395096e-05, + "loss": 0.4423, + "step": 8303 + }, + { + "epoch": 5.360877985797289, + "grad_norm": 1.2275084897458624, + "learning_rate": 1.3431039979432839e-05, + "loss": 0.4331, + "step": 8304 + }, + { + "epoch": 5.3615235635894125, + "grad_norm": 1.206538303725013, + "learning_rate": 1.340433159843058e-05, + "loss": 0.4505, + "step": 8305 + }, + { + "epoch": 5.362169141381536, + "grad_norm": 1.3076570671334748, + "learning_rate": 1.3377648557342985e-05, + "loss": 0.4505, + "step": 8306 + }, + { + "epoch": 5.36281471917366, + "grad_norm": 1.412353017044183, + "learning_rate": 1.3350990861120082e-05, + "loss": 0.5256, + "step": 8307 + }, + { + "epoch": 5.363460296965784, + "grad_norm": 1.246229747062476, + "learning_rate": 1.332435851470708e-05, + "loss": 0.4802, + "step": 8308 + }, + { + "epoch": 5.364105874757908, + "grad_norm": 1.3550711914115487, + "learning_rate": 1.3297751523044659e-05, + "loss": 0.5022, + "step": 8309 + }, + { + "epoch": 5.364751452550032, + "grad_norm": 1.6816255244150093, + "learning_rate": 1.3271169891068589e-05, + "loss": 0.4349, + "step": 8310 + }, + { + "epoch": 5.365397030342156, + "grad_norm": 1.3294219152843962, + "learning_rate": 1.3244613623710077e-05, + "loss": 0.4717, + "step": 8311 + }, + { + "epoch": 5.36604260813428, + "grad_norm": 1.4371290068930234, + "learning_rate": 1.3218082725895629e-05, + "loss": 0.5973, + "step": 8312 + }, + { + "epoch": 5.366688185926404, + "grad_norm": 1.4647770506705209, + "learning_rate": 1.319157720254691e-05, + "loss": 0.4987, + "step": 8313 + }, + { + "epoch": 5.367333763718528, + "grad_norm": 1.5489034392765242, + "learning_rate": 1.3165097058581053e-05, + "loss": 0.5321, + "step": 8314 + }, + { + "epoch": 5.367979341510652, + "grad_norm": 1.421187135236845, + "learning_rate": 1.3138642298910412e-05, + "loss": 0.5362, + "step": 8315 + }, + { + "epoch": 5.368624919302776, + "grad_norm": 1.4677011874493202, + "learning_rate": 1.3112212928442511e-05, + "loss": 0.4612, + "step": 8316 + }, + { + "epoch": 5.3692704970949, + "grad_norm": 1.5992398274801831, + "learning_rate": 1.3085808952080362e-05, + "loss": 0.587, + "step": 8317 + }, + { + "epoch": 5.369916074887024, + "grad_norm": 1.524736449340101, + "learning_rate": 1.305943037472218e-05, + "loss": 0.5391, + "step": 8318 + }, + { + "epoch": 5.3705616526791475, + "grad_norm": 1.4599308714856636, + "learning_rate": 1.3033077201261399e-05, + "loss": 0.4562, + "step": 8319 + }, + { + "epoch": 5.3712072304712715, + "grad_norm": 1.2954267795926988, + "learning_rate": 1.300674943658686e-05, + "loss": 0.459, + "step": 8320 + }, + { + "epoch": 5.371852808263395, + "grad_norm": 1.4205160384683966, + "learning_rate": 1.298044708558264e-05, + "loss": 0.4963, + "step": 8321 + }, + { + "epoch": 5.372498386055519, + "grad_norm": 1.7080790983561311, + "learning_rate": 1.2954170153128068e-05, + "loss": 0.4996, + "step": 8322 + }, + { + "epoch": 5.373143963847643, + "grad_norm": 1.427666341315315, + "learning_rate": 1.2927918644097812e-05, + "loss": 0.4829, + "step": 8323 + }, + { + "epoch": 5.373789541639767, + "grad_norm": 1.2980626736554692, + "learning_rate": 1.2901692563361776e-05, + "loss": 0.4684, + "step": 8324 + }, + { + "epoch": 5.374435119431892, + "grad_norm": 1.3095323987093441, + "learning_rate": 1.2875491915785186e-05, + "loss": 0.4878, + "step": 8325 + }, + { + "epoch": 5.375080697224016, + "grad_norm": 1.6876638268641355, + "learning_rate": 1.2849316706228536e-05, + "loss": 0.5786, + "step": 8326 + }, + { + 
"epoch": 5.37572627501614, + "grad_norm": 1.4785400556592827, + "learning_rate": 1.2823166939547591e-05, + "loss": 0.4644, + "step": 8327 + }, + { + "epoch": 5.376371852808264, + "grad_norm": 1.359292017231472, + "learning_rate": 1.2797042620593406e-05, + "loss": 0.5128, + "step": 8328 + }, + { + "epoch": 5.377017430600388, + "grad_norm": 1.3687985720928673, + "learning_rate": 1.277094375421232e-05, + "loss": 0.5638, + "step": 8329 + }, + { + "epoch": 5.377663008392512, + "grad_norm": 1.4803154591121501, + "learning_rate": 1.2744870345245928e-05, + "loss": 0.5916, + "step": 8330 + }, + { + "epoch": 5.378308586184636, + "grad_norm": 1.3305246030174924, + "learning_rate": 1.2718822398531142e-05, + "loss": 0.5078, + "step": 8331 + }, + { + "epoch": 5.3789541639767595, + "grad_norm": 1.366257555121968, + "learning_rate": 1.2692799918900098e-05, + "loss": 0.4931, + "step": 8332 + }, + { + "epoch": 5.3795997417688834, + "grad_norm": 1.28171326510041, + "learning_rate": 1.2666802911180236e-05, + "loss": 0.4714, + "step": 8333 + }, + { + "epoch": 5.380245319561007, + "grad_norm": 1.3461035145036766, + "learning_rate": 1.2640831380194344e-05, + "loss": 0.5042, + "step": 8334 + }, + { + "epoch": 5.380890897353131, + "grad_norm": 1.2840808551324756, + "learning_rate": 1.2614885330760305e-05, + "loss": 0.5024, + "step": 8335 + }, + { + "epoch": 5.381536475145255, + "grad_norm": 1.569558733602245, + "learning_rate": 1.2588964767691418e-05, + "loss": 0.6643, + "step": 8336 + }, + { + "epoch": 5.382182052937379, + "grad_norm": 1.2188012040336185, + "learning_rate": 1.2563069695796285e-05, + "loss": 0.4217, + "step": 8337 + }, + { + "epoch": 5.382827630729503, + "grad_norm": 1.4055486052955932, + "learning_rate": 1.2537200119878599e-05, + "loss": 0.5055, + "step": 8338 + }, + { + "epoch": 5.383473208521627, + "grad_norm": 1.3580821910765035, + "learning_rate": 1.2511356044737504e-05, + "loss": 0.5383, + "step": 8339 + }, + { + "epoch": 5.384118786313751, + "grad_norm": 1.4681743440760848, + "learning_rate": 1.2485537475167363e-05, + "loss": 0.5605, + "step": 8340 + }, + { + "epoch": 5.384764364105875, + "grad_norm": 1.348033193566408, + "learning_rate": 1.2459744415957695e-05, + "loss": 0.5062, + "step": 8341 + }, + { + "epoch": 5.385409941897999, + "grad_norm": 1.2642286943437933, + "learning_rate": 1.2433976871893458e-05, + "loss": 0.4918, + "step": 8342 + }, + { + "epoch": 5.386055519690123, + "grad_norm": 1.6327187177278695, + "learning_rate": 1.2408234847754828e-05, + "loss": 0.7035, + "step": 8343 + }, + { + "epoch": 5.386701097482247, + "grad_norm": 1.420537894648891, + "learning_rate": 1.23825183483171e-05, + "loss": 0.4756, + "step": 8344 + }, + { + "epoch": 5.387346675274371, + "grad_norm": 1.29358311361492, + "learning_rate": 1.2356827378351058e-05, + "loss": 0.4564, + "step": 8345 + }, + { + "epoch": 5.3879922530664945, + "grad_norm": 1.3747167870675332, + "learning_rate": 1.2331161942622609e-05, + "loss": 0.4633, + "step": 8346 + }, + { + "epoch": 5.3886378308586185, + "grad_norm": 1.444234465018468, + "learning_rate": 1.2305522045892958e-05, + "loss": 0.5105, + "step": 8347 + }, + { + "epoch": 5.389283408650742, + "grad_norm": 1.3165414724888174, + "learning_rate": 1.2279907692918567e-05, + "loss": 0.4857, + "step": 8348 + }, + { + "epoch": 5.389928986442866, + "grad_norm": 1.6976401887872488, + "learning_rate": 1.2254318888451171e-05, + "loss": 0.4532, + "step": 8349 + }, + { + "epoch": 5.39057456423499, + "grad_norm": 1.2556771514646334, + "learning_rate": 1.2228755637237752e-05, + "loss": 
0.51, + "step": 8350 + }, + { + "epoch": 5.391220142027114, + "grad_norm": 1.437212904531371, + "learning_rate": 1.2203217944020554e-05, + "loss": 0.4793, + "step": 8351 + }, + { + "epoch": 5.391865719819238, + "grad_norm": 1.3897161835508984, + "learning_rate": 1.2177705813537103e-05, + "loss": 0.4038, + "step": 8352 + }, + { + "epoch": 5.392511297611362, + "grad_norm": 1.2905913796219906, + "learning_rate": 1.2152219250520163e-05, + "loss": 0.4082, + "step": 8353 + }, + { + "epoch": 5.393156875403486, + "grad_norm": 1.3061917502772256, + "learning_rate": 1.2126758259697732e-05, + "loss": 0.5311, + "step": 8354 + }, + { + "epoch": 5.39380245319561, + "grad_norm": 1.3959460788012465, + "learning_rate": 1.2101322845793105e-05, + "loss": 0.4836, + "step": 8355 + }, + { + "epoch": 5.394448030987734, + "grad_norm": 1.3351916166823978, + "learning_rate": 1.2075913013524819e-05, + "loss": 0.5053, + "step": 8356 + }, + { + "epoch": 5.395093608779858, + "grad_norm": 1.4892493723510227, + "learning_rate": 1.2050528767606653e-05, + "loss": 0.4682, + "step": 8357 + }, + { + "epoch": 5.395739186571982, + "grad_norm": 1.486740417964268, + "learning_rate": 1.2025170112747628e-05, + "loss": 0.5255, + "step": 8358 + }, + { + "epoch": 5.396384764364106, + "grad_norm": 1.461835801499529, + "learning_rate": 1.1999837053652095e-05, + "loss": 0.4708, + "step": 8359 + }, + { + "epoch": 5.39703034215623, + "grad_norm": 1.7813222021023893, + "learning_rate": 1.197452959501956e-05, + "loss": 0.465, + "step": 8360 + }, + { + "epoch": 5.3976759199483535, + "grad_norm": 1.3849190867153294, + "learning_rate": 1.1949247741544788e-05, + "loss": 0.5162, + "step": 8361 + }, + { + "epoch": 5.398321497740477, + "grad_norm": 1.4795066722759056, + "learning_rate": 1.1923991497917923e-05, + "loss": 0.5418, + "step": 8362 + }, + { + "epoch": 5.398967075532601, + "grad_norm": 1.3548127560794025, + "learning_rate": 1.1898760868824153e-05, + "loss": 0.59, + "step": 8363 + }, + { + "epoch": 5.399612653324725, + "grad_norm": 1.5764024021460734, + "learning_rate": 1.1873555858944083e-05, + "loss": 0.4823, + "step": 8364 + }, + { + "epoch": 5.400258231116849, + "grad_norm": 1.3653136137622002, + "learning_rate": 1.1848376472953509e-05, + "loss": 0.5078, + "step": 8365 + }, + { + "epoch": 5.400903808908973, + "grad_norm": 1.49260612512578, + "learning_rate": 1.1823222715523405e-05, + "loss": 0.5908, + "step": 8366 + }, + { + "epoch": 5.401549386701097, + "grad_norm": 1.4244498974657893, + "learning_rate": 1.1798094591320128e-05, + "loss": 0.5172, + "step": 8367 + }, + { + "epoch": 5.402194964493221, + "grad_norm": 1.5394140670593195, + "learning_rate": 1.1772992105005196e-05, + "loss": 0.4941, + "step": 8368 + }, + { + "epoch": 5.402840542285345, + "grad_norm": 1.456615973871738, + "learning_rate": 1.1747915261235302e-05, + "loss": 0.5218, + "step": 8369 + }, + { + "epoch": 5.403486120077469, + "grad_norm": 1.2949213611318997, + "learning_rate": 1.1722864064662557e-05, + "loss": 0.4975, + "step": 8370 + }, + { + "epoch": 5.404131697869594, + "grad_norm": 1.7103357434592334, + "learning_rate": 1.1697838519934177e-05, + "loss": 0.5483, + "step": 8371 + }, + { + "epoch": 5.404777275661718, + "grad_norm": 1.5563183511076988, + "learning_rate": 1.1672838631692666e-05, + "loss": 0.4599, + "step": 8372 + }, + { + "epoch": 5.4054228534538415, + "grad_norm": 1.4447468662096017, + "learning_rate": 1.1647864404575764e-05, + "loss": 0.5989, + "step": 8373 + }, + { + "epoch": 5.4060684312459655, + "grad_norm": 1.3177658482357415, + "learning_rate": 
1.162291584321643e-05, + "loss": 0.4853, + "step": 8374 + }, + { + "epoch": 5.406714009038089, + "grad_norm": 1.5630519004491312, + "learning_rate": 1.1597992952242913e-05, + "loss": 0.5152, + "step": 8375 + }, + { + "epoch": 5.407359586830213, + "grad_norm": 1.3766332253910323, + "learning_rate": 1.1573095736278648e-05, + "loss": 0.5367, + "step": 8376 + }, + { + "epoch": 5.408005164622337, + "grad_norm": 1.285583460196282, + "learning_rate": 1.154822419994234e-05, + "loss": 0.4518, + "step": 8377 + }, + { + "epoch": 5.408650742414461, + "grad_norm": 1.562136601338632, + "learning_rate": 1.15233783478479e-05, + "loss": 0.4473, + "step": 8378 + }, + { + "epoch": 5.409296320206585, + "grad_norm": 1.327910746081459, + "learning_rate": 1.1498558184604489e-05, + "loss": 0.4351, + "step": 8379 + }, + { + "epoch": 5.409941897998709, + "grad_norm": 1.5447778206736644, + "learning_rate": 1.1473763714816526e-05, + "loss": 0.5656, + "step": 8380 + }, + { + "epoch": 5.410587475790833, + "grad_norm": 1.378234820968543, + "learning_rate": 1.1448994943083644e-05, + "loss": 0.46, + "step": 8381 + }, + { + "epoch": 5.411233053582957, + "grad_norm": 1.4618053524202097, + "learning_rate": 1.1424251874000673e-05, + "loss": 0.5201, + "step": 8382 + }, + { + "epoch": 5.411878631375081, + "grad_norm": 1.255816302474402, + "learning_rate": 1.139953451215772e-05, + "loss": 0.4652, + "step": 8383 + }, + { + "epoch": 5.412524209167205, + "grad_norm": 1.7305867964471666, + "learning_rate": 1.1374842862140188e-05, + "loss": 0.5421, + "step": 8384 + }, + { + "epoch": 5.413169786959329, + "grad_norm": 1.4127355234558479, + "learning_rate": 1.1350176928528526e-05, + "loss": 0.487, + "step": 8385 + }, + { + "epoch": 5.413815364751453, + "grad_norm": 1.5468099890347935, + "learning_rate": 1.1325536715898542e-05, + "loss": 0.5585, + "step": 8386 + }, + { + "epoch": 5.414460942543577, + "grad_norm": 1.3185230513037867, + "learning_rate": 1.130092222882133e-05, + "loss": 0.4079, + "step": 8387 + }, + { + "epoch": 5.4151065203357005, + "grad_norm": 1.4330879872002953, + "learning_rate": 1.1276333471863037e-05, + "loss": 0.4685, + "step": 8388 + }, + { + "epoch": 5.415752098127824, + "grad_norm": 1.307735376231645, + "learning_rate": 1.1251770449585179e-05, + "loss": 0.4571, + "step": 8389 + }, + { + "epoch": 5.416397675919948, + "grad_norm": 1.4419530490977563, + "learning_rate": 1.122723316654448e-05, + "loss": 0.4989, + "step": 8390 + }, + { + "epoch": 5.417043253712072, + "grad_norm": 1.3646684793382147, + "learning_rate": 1.1202721627292783e-05, + "loss": 0.4981, + "step": 8391 + }, + { + "epoch": 5.417688831504196, + "grad_norm": 1.2171868403985369, + "learning_rate": 1.1178235836377298e-05, + "loss": 0.4498, + "step": 8392 + }, + { + "epoch": 5.41833440929632, + "grad_norm": 1.3732203600563468, + "learning_rate": 1.1153775798340392e-05, + "loss": 0.4892, + "step": 8393 + }, + { + "epoch": 5.418979987088444, + "grad_norm": 1.4054748495909763, + "learning_rate": 1.1129341517719603e-05, + "loss": 0.4531, + "step": 8394 + }, + { + "epoch": 5.419625564880568, + "grad_norm": 1.3043117088954383, + "learning_rate": 1.1104932999047789e-05, + "loss": 0.5515, + "step": 8395 + }, + { + "epoch": 5.420271142672692, + "grad_norm": 1.3262557250200557, + "learning_rate": 1.1080550246852975e-05, + "loss": 0.4745, + "step": 8396 + }, + { + "epoch": 5.420916720464816, + "grad_norm": 1.4233820175265646, + "learning_rate": 1.1056193265658398e-05, + "loss": 0.5107, + "step": 8397 + }, + { + "epoch": 5.42156229825694, + "grad_norm": 
1.3271698487022487, + "learning_rate": 1.1031862059982555e-05, + "loss": 0.4811, + "step": 8398 + }, + { + "epoch": 5.422207876049064, + "grad_norm": 1.4555697681794797, + "learning_rate": 1.1007556634339138e-05, + "loss": 0.5217, + "step": 8399 + }, + { + "epoch": 5.422853453841188, + "grad_norm": 1.4834633334541987, + "learning_rate": 1.0983276993237039e-05, + "loss": 0.5228, + "step": 8400 + }, + { + "epoch": 5.423499031633312, + "grad_norm": 1.35646917074859, + "learning_rate": 1.0959023141180373e-05, + "loss": 0.4403, + "step": 8401 + }, + { + "epoch": 5.4241446094254355, + "grad_norm": 1.3938125095877862, + "learning_rate": 1.0934795082668507e-05, + "loss": 0.4629, + "step": 8402 + }, + { + "epoch": 5.4247901872175595, + "grad_norm": 1.5412888525938666, + "learning_rate": 1.0910592822195996e-05, + "loss": 0.4976, + "step": 8403 + }, + { + "epoch": 5.425435765009683, + "grad_norm": 1.329392265396292, + "learning_rate": 1.0886416364252599e-05, + "loss": 0.471, + "step": 8404 + }, + { + "epoch": 5.426081342801807, + "grad_norm": 1.354465964118527, + "learning_rate": 1.0862265713323293e-05, + "loss": 0.4773, + "step": 8405 + }, + { + "epoch": 5.426726920593931, + "grad_norm": 1.2924672691494004, + "learning_rate": 1.0838140873888296e-05, + "loss": 0.502, + "step": 8406 + }, + { + "epoch": 5.427372498386055, + "grad_norm": 2.318804903343374, + "learning_rate": 1.0814041850423011e-05, + "loss": 0.5829, + "step": 8407 + }, + { + "epoch": 5.428018076178179, + "grad_norm": 1.4046463532956608, + "learning_rate": 1.0789968647398013e-05, + "loss": 0.5635, + "step": 8408 + }, + { + "epoch": 5.428663653970303, + "grad_norm": 1.4845826161895506, + "learning_rate": 1.0765921269279227e-05, + "loss": 0.5289, + "step": 8409 + }, + { + "epoch": 5.429309231762427, + "grad_norm": 1.4735230174722602, + "learning_rate": 1.0741899720527603e-05, + "loss": 0.4999, + "step": 8410 + }, + { + "epoch": 5.429954809554552, + "grad_norm": 1.8614017246722678, + "learning_rate": 1.0717904005599393e-05, + "loss": 0.5014, + "step": 8411 + }, + { + "epoch": 5.430600387346676, + "grad_norm": 1.2881569759761426, + "learning_rate": 1.0693934128946118e-05, + "loss": 0.4387, + "step": 8412 + }, + { + "epoch": 5.4312459651388, + "grad_norm": 1.2368800572962324, + "learning_rate": 1.0669990095014358e-05, + "loss": 0.4378, + "step": 8413 + }, + { + "epoch": 5.431891542930924, + "grad_norm": 1.4582744094016704, + "learning_rate": 1.064607190824604e-05, + "loss": 0.5301, + "step": 8414 + }, + { + "epoch": 5.4325371207230475, + "grad_norm": 1.6102117742387743, + "learning_rate": 1.0622179573078249e-05, + "loss": 0.5639, + "step": 8415 + }, + { + "epoch": 5.433182698515171, + "grad_norm": 1.2779617590912864, + "learning_rate": 1.0598313093943173e-05, + "loss": 0.4905, + "step": 8416 + }, + { + "epoch": 5.433828276307295, + "grad_norm": 1.3689320011255701, + "learning_rate": 1.057447247526837e-05, + "loss": 0.519, + "step": 8417 + }, + { + "epoch": 5.434473854099419, + "grad_norm": 1.4486333272155436, + "learning_rate": 1.055065772147652e-05, + "loss": 0.5038, + "step": 8418 + }, + { + "epoch": 5.435119431891543, + "grad_norm": 1.611194511175574, + "learning_rate": 1.0526868836985436e-05, + "loss": 0.5485, + "step": 8419 + }, + { + "epoch": 5.435765009683667, + "grad_norm": 1.4097899447762834, + "learning_rate": 1.0503105826208291e-05, + "loss": 0.5686, + "step": 8420 + }, + { + "epoch": 5.436410587475791, + "grad_norm": 1.4423456424102241, + "learning_rate": 1.0479368693553326e-05, + "loss": 0.4627, + "step": 8421 + }, + { + "epoch": 
5.437056165267915, + "grad_norm": 1.3806872837248596, + "learning_rate": 1.0455657443424014e-05, + "loss": 0.5326, + "step": 8422 + }, + { + "epoch": 5.437701743060039, + "grad_norm": 1.4227950259422564, + "learning_rate": 1.0431972080219075e-05, + "loss": 0.518, + "step": 8423 + }, + { + "epoch": 5.438347320852163, + "grad_norm": 1.2423013434858199, + "learning_rate": 1.0408312608332359e-05, + "loss": 0.4577, + "step": 8424 + }, + { + "epoch": 5.438992898644287, + "grad_norm": 1.2326024933882607, + "learning_rate": 1.038467903215297e-05, + "loss": 0.421, + "step": 8425 + }, + { + "epoch": 5.439638476436411, + "grad_norm": 1.4824901695679629, + "learning_rate": 1.0361071356065153e-05, + "loss": 0.4779, + "step": 8426 + }, + { + "epoch": 5.440284054228535, + "grad_norm": 1.2691565717719644, + "learning_rate": 1.0337489584448389e-05, + "loss": 0.4725, + "step": 8427 + }, + { + "epoch": 5.440929632020659, + "grad_norm": 1.67632755343728, + "learning_rate": 1.0313933721677358e-05, + "loss": 0.5362, + "step": 8428 + }, + { + "epoch": 5.4415752098127825, + "grad_norm": 1.3842768506814778, + "learning_rate": 1.0290403772121886e-05, + "loss": 0.519, + "step": 8429 + }, + { + "epoch": 5.4422207876049065, + "grad_norm": 1.4269018238480682, + "learning_rate": 1.0266899740147044e-05, + "loss": 0.4923, + "step": 8430 + }, + { + "epoch": 5.44286636539703, + "grad_norm": 1.5052561651404095, + "learning_rate": 1.024342163011308e-05, + "loss": 0.5548, + "step": 8431 + }, + { + "epoch": 5.443511943189154, + "grad_norm": 1.5624509193822036, + "learning_rate": 1.0219969446375408e-05, + "loss": 0.5809, + "step": 8432 + }, + { + "epoch": 5.444157520981278, + "grad_norm": 1.4625554818777489, + "learning_rate": 1.0196543193284634e-05, + "loss": 0.5378, + "step": 8433 + }, + { + "epoch": 5.444803098773402, + "grad_norm": 1.4391798400074158, + "learning_rate": 1.0173142875186646e-05, + "loss": 0.4331, + "step": 8434 + }, + { + "epoch": 5.445448676565526, + "grad_norm": 1.3431420116742776, + "learning_rate": 1.0149768496422373e-05, + "loss": 0.5058, + "step": 8435 + }, + { + "epoch": 5.44609425435765, + "grad_norm": 1.611130443603802, + "learning_rate": 1.0126420061328011e-05, + "loss": 0.56, + "step": 8436 + }, + { + "epoch": 5.446739832149774, + "grad_norm": 1.3952164668028937, + "learning_rate": 1.0103097574234997e-05, + "loss": 0.5296, + "step": 8437 + }, + { + "epoch": 5.447385409941898, + "grad_norm": 1.2957437426671168, + "learning_rate": 1.0079801039469803e-05, + "loss": 0.4536, + "step": 8438 + }, + { + "epoch": 5.448030987734022, + "grad_norm": 1.30999073265878, + "learning_rate": 1.0056530461354257e-05, + "loss": 0.3935, + "step": 8439 + }, + { + "epoch": 5.448676565526146, + "grad_norm": 1.4074460540487839, + "learning_rate": 1.0033285844205285e-05, + "loss": 0.5155, + "step": 8440 + }, + { + "epoch": 5.44932214331827, + "grad_norm": 1.4127611865516843, + "learning_rate": 1.0010067192334925e-05, + "loss": 0.538, + "step": 8441 + }, + { + "epoch": 5.449967721110394, + "grad_norm": 1.3302080952973814, + "learning_rate": 9.986874510050579e-06, + "loss": 0.425, + "step": 8442 + }, + { + "epoch": 5.450613298902518, + "grad_norm": 1.4267874225059891, + "learning_rate": 9.963707801654692e-06, + "loss": 0.5184, + "step": 8443 + }, + { + "epoch": 5.4512588766946415, + "grad_norm": 1.404157321096108, + "learning_rate": 9.940567071444889e-06, + "loss": 0.6391, + "step": 8444 + }, + { + "epoch": 5.451904454486765, + "grad_norm": 1.3449645113693336, + "learning_rate": 9.917452323714086e-06, + "loss": 0.3899, + 
"step": 8445 + }, + { + "epoch": 5.452550032278889, + "grad_norm": 1.7518946720704198, + "learning_rate": 9.894363562750257e-06, + "loss": 0.5629, + "step": 8446 + }, + { + "epoch": 5.453195610071013, + "grad_norm": 1.3847847955380646, + "learning_rate": 9.871300792836623e-06, + "loss": 0.4596, + "step": 8447 + }, + { + "epoch": 5.453841187863137, + "grad_norm": 1.2549898740235093, + "learning_rate": 9.848264018251578e-06, + "loss": 0.4816, + "step": 8448 + }, + { + "epoch": 5.454486765655261, + "grad_norm": 1.4326907422470896, + "learning_rate": 9.825253243268671e-06, + "loss": 0.5645, + "step": 8449 + }, + { + "epoch": 5.455132343447385, + "grad_norm": 1.4584573706731396, + "learning_rate": 9.802268472156638e-06, + "loss": 0.5272, + "step": 8450 + }, + { + "epoch": 5.455777921239509, + "grad_norm": 1.270537515409029, + "learning_rate": 9.7793097091794e-06, + "loss": 0.4451, + "step": 8451 + }, + { + "epoch": 5.456423499031633, + "grad_norm": 1.4795819378452877, + "learning_rate": 9.756376958596034e-06, + "loss": 0.5572, + "step": 8452 + }, + { + "epoch": 5.457069076823757, + "grad_norm": 1.5356914166944755, + "learning_rate": 9.733470224660823e-06, + "loss": 0.5323, + "step": 8453 + }, + { + "epoch": 5.457714654615881, + "grad_norm": 1.4744789413865007, + "learning_rate": 9.710589511623196e-06, + "loss": 0.5446, + "step": 8454 + }, + { + "epoch": 5.458360232408005, + "grad_norm": 1.3552611247038002, + "learning_rate": 9.687734823727744e-06, + "loss": 0.4657, + "step": 8455 + }, + { + "epoch": 5.459005810200129, + "grad_norm": 1.330478937170322, + "learning_rate": 9.664906165214275e-06, + "loss": 0.5348, + "step": 8456 + }, + { + "epoch": 5.4596513879922535, + "grad_norm": 1.264269743584272, + "learning_rate": 9.64210354031772e-06, + "loss": 0.403, + "step": 8457 + }, + { + "epoch": 5.460296965784377, + "grad_norm": 1.4611999542118623, + "learning_rate": 9.619326953268208e-06, + "loss": 0.5714, + "step": 8458 + }, + { + "epoch": 5.460942543576501, + "grad_norm": 1.411950737195724, + "learning_rate": 9.596576408291078e-06, + "loss": 0.4379, + "step": 8459 + }, + { + "epoch": 5.461588121368625, + "grad_norm": 1.379429893565566, + "learning_rate": 9.573851909606723e-06, + "loss": 0.4844, + "step": 8460 + }, + { + "epoch": 5.462233699160749, + "grad_norm": 1.7614910723697854, + "learning_rate": 9.551153461430782e-06, + "loss": 0.5687, + "step": 8461 + }, + { + "epoch": 5.462879276952873, + "grad_norm": 1.360849632188149, + "learning_rate": 9.528481067974125e-06, + "loss": 0.5534, + "step": 8462 + }, + { + "epoch": 5.463524854744997, + "grad_norm": 1.6530710633573031, + "learning_rate": 9.505834733442618e-06, + "loss": 0.5503, + "step": 8463 + }, + { + "epoch": 5.464170432537121, + "grad_norm": 1.330461351097528, + "learning_rate": 9.48321446203747e-06, + "loss": 0.4903, + "step": 8464 + }, + { + "epoch": 5.464816010329245, + "grad_norm": 1.3173020385802015, + "learning_rate": 9.460620257954953e-06, + "loss": 0.4848, + "step": 8465 + }, + { + "epoch": 5.465461588121369, + "grad_norm": 1.560243204078884, + "learning_rate": 9.438052125386469e-06, + "loss": 0.4935, + "step": 8466 + }, + { + "epoch": 5.466107165913493, + "grad_norm": 1.3546950291146749, + "learning_rate": 9.415510068518734e-06, + "loss": 0.4752, + "step": 8467 + }, + { + "epoch": 5.466752743705617, + "grad_norm": 1.4720470171218891, + "learning_rate": 9.3929940915335e-06, + "loss": 0.5621, + "step": 8468 + }, + { + "epoch": 5.467398321497741, + "grad_norm": 1.3754417907153964, + "learning_rate": 9.370504198607664e-06, + "loss": 
0.5283, + "step": 8469 + }, + { + "epoch": 5.468043899289865, + "grad_norm": 1.3918285736717535, + "learning_rate": 9.348040393913386e-06, + "loss": 0.5069, + "step": 8470 + }, + { + "epoch": 5.4686894770819885, + "grad_norm": 1.4131696812914005, + "learning_rate": 9.325602681617933e-06, + "loss": 0.5909, + "step": 8471 + }, + { + "epoch": 5.469335054874112, + "grad_norm": 1.3431709400472922, + "learning_rate": 9.303191065883725e-06, + "loss": 0.5385, + "step": 8472 + }, + { + "epoch": 5.469980632666236, + "grad_norm": 1.3022590328732033, + "learning_rate": 9.280805550868353e-06, + "loss": 0.4636, + "step": 8473 + }, + { + "epoch": 5.47062621045836, + "grad_norm": 1.4909004568655315, + "learning_rate": 9.258446140724546e-06, + "loss": 0.6443, + "step": 8474 + }, + { + "epoch": 5.471271788250484, + "grad_norm": 1.5392083480025869, + "learning_rate": 9.236112839600219e-06, + "loss": 0.4094, + "step": 8475 + }, + { + "epoch": 5.471917366042608, + "grad_norm": 1.330913428207604, + "learning_rate": 9.21380565163844e-06, + "loss": 0.4409, + "step": 8476 + }, + { + "epoch": 5.472562943834732, + "grad_norm": 1.4363289212972186, + "learning_rate": 9.191524580977416e-06, + "loss": 0.4634, + "step": 8477 + }, + { + "epoch": 5.473208521626856, + "grad_norm": 1.6268076695874807, + "learning_rate": 9.169269631750509e-06, + "loss": 0.515, + "step": 8478 + }, + { + "epoch": 5.47385409941898, + "grad_norm": 1.4632721431264415, + "learning_rate": 9.147040808086264e-06, + "loss": 0.5326, + "step": 8479 + }, + { + "epoch": 5.474499677211104, + "grad_norm": 1.5763206564659278, + "learning_rate": 9.124838114108335e-06, + "loss": 0.5358, + "step": 8480 + }, + { + "epoch": 5.475145255003228, + "grad_norm": 1.3326950693791275, + "learning_rate": 9.102661553935558e-06, + "loss": 0.5055, + "step": 8481 + }, + { + "epoch": 5.475790832795352, + "grad_norm": 1.4454534317162409, + "learning_rate": 9.080511131681928e-06, + "loss": 0.604, + "step": 8482 + }, + { + "epoch": 5.476436410587476, + "grad_norm": 1.6698230111655907, + "learning_rate": 9.058386851456539e-06, + "loss": 0.4863, + "step": 8483 + }, + { + "epoch": 5.4770819883796, + "grad_norm": 1.5185466634445952, + "learning_rate": 9.03628871736376e-06, + "loss": 0.5555, + "step": 8484 + }, + { + "epoch": 5.4777275661717235, + "grad_norm": 1.504437696382064, + "learning_rate": 9.014216733502927e-06, + "loss": 0.5378, + "step": 8485 + }, + { + "epoch": 5.4783731439638474, + "grad_norm": 1.2782663896784239, + "learning_rate": 8.992170903968665e-06, + "loss": 0.444, + "step": 8486 + }, + { + "epoch": 5.479018721755971, + "grad_norm": 1.302831718601299, + "learning_rate": 8.970151232850736e-06, + "loss": 0.4991, + "step": 8487 + }, + { + "epoch": 5.479664299548095, + "grad_norm": 1.3625505021138935, + "learning_rate": 8.948157724233957e-06, + "loss": 0.5062, + "step": 8488 + }, + { + "epoch": 5.480309877340219, + "grad_norm": 1.2912505025240668, + "learning_rate": 8.926190382198378e-06, + "loss": 0.4536, + "step": 8489 + }, + { + "epoch": 5.480955455132343, + "grad_norm": 1.9837549460790016, + "learning_rate": 8.904249210819209e-06, + "loss": 0.4984, + "step": 8490 + }, + { + "epoch": 5.481601032924467, + "grad_norm": 1.3828279054374821, + "learning_rate": 8.882334214166691e-06, + "loss": 0.4464, + "step": 8491 + }, + { + "epoch": 5.482246610716591, + "grad_norm": 1.3857726545823432, + "learning_rate": 8.860445396306327e-06, + "loss": 0.4976, + "step": 8492 + }, + { + "epoch": 5.482892188508715, + "grad_norm": 1.4798527788640654, + "learning_rate": 
8.838582761298745e-06, + "loss": 0.6211, + "step": 8493 + }, + { + "epoch": 5.483537766300839, + "grad_norm": 1.6185542492034763, + "learning_rate": 8.816746313199625e-06, + "loss": 0.5511, + "step": 8494 + }, + { + "epoch": 5.484183344092963, + "grad_norm": 1.3980763508253098, + "learning_rate": 8.794936056059888e-06, + "loss": 0.4779, + "step": 8495 + }, + { + "epoch": 5.484828921885087, + "grad_norm": 1.253448851129024, + "learning_rate": 8.77315199392558e-06, + "loss": 0.4673, + "step": 8496 + }, + { + "epoch": 5.485474499677212, + "grad_norm": 1.4021899782714466, + "learning_rate": 8.751394130837841e-06, + "loss": 0.5321, + "step": 8497 + }, + { + "epoch": 5.4861200774693355, + "grad_norm": 1.4266066053034399, + "learning_rate": 8.729662470833004e-06, + "loss": 0.4675, + "step": 8498 + }, + { + "epoch": 5.486765655261459, + "grad_norm": 1.2431274196074547, + "learning_rate": 8.707957017942514e-06, + "loss": 0.4767, + "step": 8499 + }, + { + "epoch": 5.487411233053583, + "grad_norm": 1.292221260648863, + "learning_rate": 8.686277776192946e-06, + "loss": 0.5004, + "step": 8500 + }, + { + "epoch": 5.488056810845707, + "grad_norm": 1.4622878955505851, + "learning_rate": 8.664624749606036e-06, + "loss": 0.5561, + "step": 8501 + }, + { + "epoch": 5.488702388637831, + "grad_norm": 1.3796713437331662, + "learning_rate": 8.64299794219862e-06, + "loss": 0.489, + "step": 8502 + }, + { + "epoch": 5.489347966429955, + "grad_norm": 1.2819064496674346, + "learning_rate": 8.62139735798274e-06, + "loss": 0.481, + "step": 8503 + }, + { + "epoch": 5.489993544222079, + "grad_norm": 1.347062584662666, + "learning_rate": 8.599823000965489e-06, + "loss": 0.4646, + "step": 8504 + }, + { + "epoch": 5.490639122014203, + "grad_norm": 2.973582272474876, + "learning_rate": 8.57827487514915e-06, + "loss": 0.5901, + "step": 8505 + }, + { + "epoch": 5.491284699806327, + "grad_norm": 1.2773658699223476, + "learning_rate": 8.556752984531124e-06, + "loss": 0.3988, + "step": 8506 + }, + { + "epoch": 5.491930277598451, + "grad_norm": 1.6818540231179395, + "learning_rate": 8.535257333103935e-06, + "loss": 0.5416, + "step": 8507 + }, + { + "epoch": 5.492575855390575, + "grad_norm": 1.3165603414817262, + "learning_rate": 8.513787924855242e-06, + "loss": 0.4813, + "step": 8508 + }, + { + "epoch": 5.493221433182699, + "grad_norm": 1.57015915261534, + "learning_rate": 8.492344763767911e-06, + "loss": 0.5446, + "step": 8509 + }, + { + "epoch": 5.493867010974823, + "grad_norm": 1.6590971210442165, + "learning_rate": 8.470927853819776e-06, + "loss": 0.4734, + "step": 8510 + }, + { + "epoch": 5.494512588766947, + "grad_norm": 1.4184809269907106, + "learning_rate": 8.44953719898394e-06, + "loss": 0.5175, + "step": 8511 + }, + { + "epoch": 5.4951581665590705, + "grad_norm": 1.3990454267783927, + "learning_rate": 8.428172803228633e-06, + "loss": 0.5352, + "step": 8512 + }, + { + "epoch": 5.4958037443511945, + "grad_norm": 1.3748816607129462, + "learning_rate": 8.406834670517082e-06, + "loss": 0.4802, + "step": 8513 + }, + { + "epoch": 5.496449322143318, + "grad_norm": 1.4448005019945502, + "learning_rate": 8.385522804807804e-06, + "loss": 0.5056, + "step": 8514 + }, + { + "epoch": 5.497094899935442, + "grad_norm": 1.349747737587366, + "learning_rate": 8.364237210054369e-06, + "loss": 0.4984, + "step": 8515 + }, + { + "epoch": 5.497740477727566, + "grad_norm": 1.4674008937086662, + "learning_rate": 8.34297789020542e-06, + "loss": 0.4417, + "step": 8516 + }, + { + "epoch": 5.49838605551969, + "grad_norm": 1.51364322081311, + 
"learning_rate": 8.32174484920482e-06, + "loss": 0.5548, + "step": 8517 + }, + { + "epoch": 5.499031633311814, + "grad_norm": 1.2650511076724666, + "learning_rate": 8.300538090991549e-06, + "loss": 0.4675, + "step": 8518 + }, + { + "epoch": 5.499677211103938, + "grad_norm": 1.2902129170873557, + "learning_rate": 8.279357619499611e-06, + "loss": 0.4446, + "step": 8519 + }, + { + "epoch": 5.500322788896062, + "grad_norm": 1.2854372739918873, + "learning_rate": 8.258203438658245e-06, + "loss": 0.4664, + "step": 8520 + }, + { + "epoch": 5.500968366688186, + "grad_norm": 1.453610326324807, + "learning_rate": 8.237075552391763e-06, + "loss": 0.5118, + "step": 8521 + }, + { + "epoch": 5.50161394448031, + "grad_norm": 1.4324897684847062, + "learning_rate": 8.215973964619616e-06, + "loss": 0.4917, + "step": 8522 + }, + { + "epoch": 5.502259522272434, + "grad_norm": 1.6452272040779201, + "learning_rate": 8.194898679256367e-06, + "loss": 0.4745, + "step": 8523 + }, + { + "epoch": 5.502905100064558, + "grad_norm": 1.3011683632475506, + "learning_rate": 8.173849700211694e-06, + "loss": 0.4554, + "step": 8524 + }, + { + "epoch": 5.503550677856682, + "grad_norm": 1.4629163647178955, + "learning_rate": 8.152827031390385e-06, + "loss": 0.5091, + "step": 8525 + }, + { + "epoch": 5.5041962556488055, + "grad_norm": 1.3596435765154835, + "learning_rate": 8.131830676692392e-06, + "loss": 0.4882, + "step": 8526 + }, + { + "epoch": 5.5048418334409295, + "grad_norm": 1.363803660190623, + "learning_rate": 8.110860640012745e-06, + "loss": 0.4733, + "step": 8527 + }, + { + "epoch": 5.505487411233053, + "grad_norm": 1.4386222129038613, + "learning_rate": 8.089916925241602e-06, + "loss": 0.5348, + "step": 8528 + }, + { + "epoch": 5.506132989025177, + "grad_norm": 2.6909322894690195, + "learning_rate": 8.068999536264254e-06, + "loss": 0.5333, + "step": 8529 + }, + { + "epoch": 5.506778566817301, + "grad_norm": 1.4696995181754682, + "learning_rate": 8.048108476961063e-06, + "loss": 0.5281, + "step": 8530 + }, + { + "epoch": 5.507424144609425, + "grad_norm": 1.4390926660547334, + "learning_rate": 8.027243751207562e-06, + "loss": 0.5174, + "step": 8531 + }, + { + "epoch": 5.508069722401549, + "grad_norm": 1.1798720019655027, + "learning_rate": 8.006405362874358e-06, + "loss": 0.4039, + "step": 8532 + }, + { + "epoch": 5.508715300193673, + "grad_norm": 2.915116330949545, + "learning_rate": 7.985593315827188e-06, + "loss": 0.4243, + "step": 8533 + }, + { + "epoch": 5.509360877985797, + "grad_norm": 1.5012536804283656, + "learning_rate": 7.964807613926949e-06, + "loss": 0.5939, + "step": 8534 + }, + { + "epoch": 5.510006455777921, + "grad_norm": 1.6595213712062202, + "learning_rate": 7.944048261029523e-06, + "loss": 0.5179, + "step": 8535 + }, + { + "epoch": 5.510652033570045, + "grad_norm": 1.28945777814228, + "learning_rate": 7.923315260986062e-06, + "loss": 0.4863, + "step": 8536 + }, + { + "epoch": 5.511297611362169, + "grad_norm": 1.4079308961063226, + "learning_rate": 7.902608617642725e-06, + "loss": 0.4949, + "step": 8537 + }, + { + "epoch": 5.511943189154293, + "grad_norm": 1.2394862036713314, + "learning_rate": 7.881928334840787e-06, + "loss": 0.4409, + "step": 8538 + }, + { + "epoch": 5.512588766946417, + "grad_norm": 1.3303597091199948, + "learning_rate": 7.86127441641668e-06, + "loss": 0.4642, + "step": 8539 + }, + { + "epoch": 5.513234344738541, + "grad_norm": 2.256246690138214, + "learning_rate": 7.840646866201938e-06, + "loss": 0.4244, + "step": 8540 + }, + { + "epoch": 5.5138799225306645, + "grad_norm": 
1.8464576824732748, + "learning_rate": 7.820045688023119e-06, + "loss": 0.513, + "step": 8541 + }, + { + "epoch": 5.514525500322788, + "grad_norm": 1.3768106878282769, + "learning_rate": 7.799470885702014e-06, + "loss": 0.5011, + "step": 8542 + }, + { + "epoch": 5.515171078114912, + "grad_norm": 1.420482140602474, + "learning_rate": 7.77892246305547e-06, + "loss": 0.5494, + "step": 8543 + }, + { + "epoch": 5.515816655907037, + "grad_norm": 1.4144790465342556, + "learning_rate": 7.758400423895372e-06, + "loss": 0.5516, + "step": 8544 + }, + { + "epoch": 5.516462233699161, + "grad_norm": 1.369304351342113, + "learning_rate": 7.737904772028824e-06, + "loss": 0.4668, + "step": 8545 + }, + { + "epoch": 5.517107811491285, + "grad_norm": 1.3726500094511116, + "learning_rate": 7.71743551125797e-06, + "loss": 0.5381, + "step": 8546 + }, + { + "epoch": 5.517753389283409, + "grad_norm": 1.8003471750554905, + "learning_rate": 7.696992645380068e-06, + "loss": 0.4858, + "step": 8547 + }, + { + "epoch": 5.518398967075533, + "grad_norm": 1.6798827194042494, + "learning_rate": 7.676576178187488e-06, + "loss": 0.6229, + "step": 8548 + }, + { + "epoch": 5.519044544867657, + "grad_norm": 1.42485215417818, + "learning_rate": 7.656186113467666e-06, + "loss": 0.5349, + "step": 8549 + }, + { + "epoch": 5.519690122659781, + "grad_norm": 1.4078246404133956, + "learning_rate": 7.635822455003227e-06, + "loss": 0.535, + "step": 8550 + }, + { + "epoch": 5.520335700451905, + "grad_norm": 1.6289243938910485, + "learning_rate": 7.615485206571781e-06, + "loss": 0.6916, + "step": 8551 + }, + { + "epoch": 5.520981278244029, + "grad_norm": 1.2366444428328893, + "learning_rate": 7.595174371946144e-06, + "loss": 0.4753, + "step": 8552 + }, + { + "epoch": 5.5216268560361526, + "grad_norm": 1.5746109040496774, + "learning_rate": 7.574889954894153e-06, + "loss": 0.536, + "step": 8553 + }, + { + "epoch": 5.5222724338282765, + "grad_norm": 1.339012127326273, + "learning_rate": 7.554631959178814e-06, + "loss": 0.5212, + "step": 8554 + }, + { + "epoch": 5.5229180116204, + "grad_norm": 1.3176022093853805, + "learning_rate": 7.534400388558154e-06, + "loss": 0.439, + "step": 8555 + }, + { + "epoch": 5.523563589412524, + "grad_norm": 1.4908716445129977, + "learning_rate": 7.5141952467853705e-06, + "loss": 0.4794, + "step": 8556 + }, + { + "epoch": 5.524209167204648, + "grad_norm": 1.4242240719929224, + "learning_rate": 7.494016537608733e-06, + "loss": 0.4961, + "step": 8557 + }, + { + "epoch": 5.524854744996772, + "grad_norm": 1.3694669989465043, + "learning_rate": 7.473864264771545e-06, + "loss": 0.5184, + "step": 8558 + }, + { + "epoch": 5.525500322788896, + "grad_norm": 1.3479508345521392, + "learning_rate": 7.453738432012351e-06, + "loss": 0.4607, + "step": 8559 + }, + { + "epoch": 5.52614590058102, + "grad_norm": 1.5529592856641872, + "learning_rate": 7.433639043064616e-06, + "loss": 0.6275, + "step": 8560 + }, + { + "epoch": 5.526791478373144, + "grad_norm": 1.543327502472854, + "learning_rate": 7.413566101657037e-06, + "loss": 0.5764, + "step": 8561 + }, + { + "epoch": 5.527437056165268, + "grad_norm": 1.3164632371322729, + "learning_rate": 7.393519611513371e-06, + "loss": 0.5442, + "step": 8562 + }, + { + "epoch": 5.528082633957392, + "grad_norm": 1.4137873969193866, + "learning_rate": 7.37349957635236e-06, + "loss": 0.4714, + "step": 8563 + }, + { + "epoch": 5.528728211749516, + "grad_norm": 1.3328606744511229, + "learning_rate": 7.353505999888032e-06, + "loss": 0.5024, + "step": 8564 + }, + { + "epoch": 5.52937378954164, + 
"grad_norm": 1.2775577163784193, + "learning_rate": 7.333538885829354e-06, + "loss": 0.4392, + "step": 8565 + }, + { + "epoch": 5.530019367333764, + "grad_norm": 1.3884586726991268, + "learning_rate": 7.313598237880414e-06, + "loss": 0.4606, + "step": 8566 + }, + { + "epoch": 5.530664945125888, + "grad_norm": 1.4351856384773833, + "learning_rate": 7.293684059740435e-06, + "loss": 0.5096, + "step": 8567 + }, + { + "epoch": 5.5313105229180115, + "grad_norm": 1.272333373925623, + "learning_rate": 7.273796355103745e-06, + "loss": 0.4645, + "step": 8568 + }, + { + "epoch": 5.531956100710135, + "grad_norm": 1.3785179788544462, + "learning_rate": 7.25393512765961e-06, + "loss": 0.4986, + "step": 8569 + }, + { + "epoch": 5.532601678502259, + "grad_norm": 1.4362575941347668, + "learning_rate": 7.2341003810926005e-06, + "loss": 0.4773, + "step": 8570 + }, + { + "epoch": 5.533247256294383, + "grad_norm": 1.4126164388522011, + "learning_rate": 7.214292119082221e-06, + "loss": 0.5137, + "step": 8571 + }, + { + "epoch": 5.533892834086507, + "grad_norm": 1.4168973386568031, + "learning_rate": 7.1945103453031176e-06, + "loss": 0.4968, + "step": 8572 + }, + { + "epoch": 5.534538411878631, + "grad_norm": 1.4008647953816453, + "learning_rate": 7.174755063425003e-06, + "loss": 0.5019, + "step": 8573 + }, + { + "epoch": 5.535183989670755, + "grad_norm": 1.1926376800792038, + "learning_rate": 7.1550262771127134e-06, + "loss": 0.418, + "step": 8574 + }, + { + "epoch": 5.535829567462879, + "grad_norm": 1.354182271499431, + "learning_rate": 7.135323990026104e-06, + "loss": 0.456, + "step": 8575 + }, + { + "epoch": 5.536475145255003, + "grad_norm": 1.4626339621480677, + "learning_rate": 7.115648205820201e-06, + "loss": 0.4513, + "step": 8576 + }, + { + "epoch": 5.537120723047127, + "grad_norm": 1.5173593885440917, + "learning_rate": 7.0959989281450195e-06, + "loss": 0.6124, + "step": 8577 + }, + { + "epoch": 5.537766300839251, + "grad_norm": 1.5839526811498559, + "learning_rate": 7.076376160645741e-06, + "loss": 0.5494, + "step": 8578 + }, + { + "epoch": 5.538411878631375, + "grad_norm": 1.4409422212643686, + "learning_rate": 7.05677990696259e-06, + "loss": 0.5142, + "step": 8579 + }, + { + "epoch": 5.539057456423499, + "grad_norm": 1.4314297471634365, + "learning_rate": 7.037210170730839e-06, + "loss": 0.4801, + "step": 8580 + }, + { + "epoch": 5.539703034215623, + "grad_norm": 1.4467916212682062, + "learning_rate": 7.017666955580919e-06, + "loss": 0.5811, + "step": 8581 + }, + { + "epoch": 5.540348612007747, + "grad_norm": 1.2026228652877913, + "learning_rate": 6.99815026513828e-06, + "loss": 0.4447, + "step": 8582 + }, + { + "epoch": 5.540994189799871, + "grad_norm": 1.400562234358666, + "learning_rate": 6.978660103023459e-06, + "loss": 0.4959, + "step": 8583 + }, + { + "epoch": 5.541639767591995, + "grad_norm": 1.5200848895887398, + "learning_rate": 6.95919647285213e-06, + "loss": 0.53, + "step": 8584 + }, + { + "epoch": 5.542285345384119, + "grad_norm": 1.576705857364857, + "learning_rate": 6.939759378234921e-06, + "loss": 0.5635, + "step": 8585 + }, + { + "epoch": 5.542930923176243, + "grad_norm": 1.4353916885008913, + "learning_rate": 6.920348822777683e-06, + "loss": 0.5244, + "step": 8586 + }, + { + "epoch": 5.543576500968367, + "grad_norm": 1.3808061823944158, + "learning_rate": 6.9009648100812835e-06, + "loss": 0.428, + "step": 8587 + }, + { + "epoch": 5.544222078760491, + "grad_norm": 1.4932250967887306, + "learning_rate": 6.881607343741579e-06, + "loss": 0.5377, + "step": 8588 + }, + { + "epoch": 
5.544867656552615, + "grad_norm": 1.2243736332100195, + "learning_rate": 6.86227642734965e-06, + "loss": 0.4148, + "step": 8589 + }, + { + "epoch": 5.545513234344739, + "grad_norm": 1.3416124855469693, + "learning_rate": 6.842972064491575e-06, + "loss": 0.5122, + "step": 8590 + }, + { + "epoch": 5.546158812136863, + "grad_norm": 1.5463483908533864, + "learning_rate": 6.8236942587484744e-06, + "loss": 0.4661, + "step": 8591 + }, + { + "epoch": 5.546804389928987, + "grad_norm": 1.453567442302381, + "learning_rate": 6.80444301369662e-06, + "loss": 0.5372, + "step": 8592 + }, + { + "epoch": 5.547449967721111, + "grad_norm": 1.4247911379436118, + "learning_rate": 6.785218332907322e-06, + "loss": 0.5193, + "step": 8593 + }, + { + "epoch": 5.548095545513235, + "grad_norm": 1.7049856828896983, + "learning_rate": 6.766020219946894e-06, + "loss": 0.4872, + "step": 8594 + }, + { + "epoch": 5.5487411233053585, + "grad_norm": 1.379446972094335, + "learning_rate": 6.7468486783768535e-06, + "loss": 0.4515, + "step": 8595 + }, + { + "epoch": 5.549386701097482, + "grad_norm": 1.489309272103806, + "learning_rate": 6.727703711753707e-06, + "loss": 0.5301, + "step": 8596 + }, + { + "epoch": 5.550032278889606, + "grad_norm": 1.571341022275847, + "learning_rate": 6.7085853236290285e-06, + "loss": 0.5979, + "step": 8597 + }, + { + "epoch": 5.55067785668173, + "grad_norm": 1.4082059194713883, + "learning_rate": 6.689493517549499e-06, + "loss": 0.5272, + "step": 8598 + }, + { + "epoch": 5.551323434473854, + "grad_norm": 1.2802352400395751, + "learning_rate": 6.670428297056818e-06, + "loss": 0.4741, + "step": 8599 + }, + { + "epoch": 5.551969012265978, + "grad_norm": 1.3103543332856071, + "learning_rate": 6.6513896656878065e-06, + "loss": 0.4776, + "step": 8600 + }, + { + "epoch": 5.552614590058102, + "grad_norm": 1.4444397841944003, + "learning_rate": 6.632377626974323e-06, + "loss": 0.4244, + "step": 8601 + }, + { + "epoch": 5.553260167850226, + "grad_norm": 1.235268483972302, + "learning_rate": 6.613392184443278e-06, + "loss": 0.4248, + "step": 8602 + }, + { + "epoch": 5.55390574564235, + "grad_norm": 1.5873867344816395, + "learning_rate": 6.59443334161669e-06, + "loss": 0.5767, + "step": 8603 + }, + { + "epoch": 5.554551323434474, + "grad_norm": 1.6743060264833272, + "learning_rate": 6.575501102011626e-06, + "loss": 0.5376, + "step": 8604 + }, + { + "epoch": 5.555196901226598, + "grad_norm": 1.440387658495719, + "learning_rate": 6.55659546914018e-06, + "loss": 0.4889, + "step": 8605 + }, + { + "epoch": 5.555842479018722, + "grad_norm": 1.352590661215599, + "learning_rate": 6.537716446509594e-06, + "loss": 0.4689, + "step": 8606 + }, + { + "epoch": 5.556488056810846, + "grad_norm": 1.3312294126413848, + "learning_rate": 6.518864037622068e-06, + "loss": 0.4346, + "step": 8607 + }, + { + "epoch": 5.55713363460297, + "grad_norm": 1.7445348331350652, + "learning_rate": 6.500038245974937e-06, + "loss": 0.4842, + "step": 8608 + }, + { + "epoch": 5.5577792123950935, + "grad_norm": 1.49261716750272, + "learning_rate": 6.48123907506064e-06, + "loss": 0.5441, + "step": 8609 + }, + { + "epoch": 5.5584247901872175, + "grad_norm": 1.3254219328390366, + "learning_rate": 6.462466528366505e-06, + "loss": 0.5174, + "step": 8610 + }, + { + "epoch": 5.559070367979341, + "grad_norm": 1.210300788850303, + "learning_rate": 6.44372060937513e-06, + "loss": 0.43, + "step": 8611 + }, + { + "epoch": 5.559715945771465, + "grad_norm": 1.324767721960576, + "learning_rate": 6.425001321564066e-06, + "loss": 0.5506, + "step": 8612 + }, + { + 
"epoch": 5.560361523563589, + "grad_norm": 1.3670621013378474, + "learning_rate": 6.406308668405885e-06, + "loss": 0.467, + "step": 8613 + }, + { + "epoch": 5.561007101355713, + "grad_norm": 1.3050804460757015, + "learning_rate": 6.387642653368297e-06, + "loss": 0.4971, + "step": 8614 + }, + { + "epoch": 5.561652679147837, + "grad_norm": 1.2577879642466545, + "learning_rate": 6.369003279914065e-06, + "loss": 0.5051, + "step": 8615 + }, + { + "epoch": 5.562298256939961, + "grad_norm": 1.6748109686459915, + "learning_rate": 6.350390551500939e-06, + "loss": 0.5182, + "step": 8616 + }, + { + "epoch": 5.562943834732085, + "grad_norm": 1.445874908427761, + "learning_rate": 6.331804471581808e-06, + "loss": 0.4579, + "step": 8617 + }, + { + "epoch": 5.563589412524209, + "grad_norm": 1.311298581908618, + "learning_rate": 6.313245043604565e-06, + "loss": 0.5052, + "step": 8618 + }, + { + "epoch": 5.564234990316333, + "grad_norm": 1.2941363041858291, + "learning_rate": 6.2947122710121865e-06, + "loss": 0.4603, + "step": 8619 + }, + { + "epoch": 5.564880568108457, + "grad_norm": 1.155283806881329, + "learning_rate": 6.276206157242691e-06, + "loss": 0.4131, + "step": 8620 + }, + { + "epoch": 5.565526145900581, + "grad_norm": 1.5996293373864612, + "learning_rate": 6.257726705729166e-06, + "loss": 0.4827, + "step": 8621 + }, + { + "epoch": 5.566171723692705, + "grad_norm": 1.3596696039004343, + "learning_rate": 6.239273919899718e-06, + "loss": 0.4306, + "step": 8622 + }, + { + "epoch": 5.566817301484829, + "grad_norm": 1.369085920097793, + "learning_rate": 6.220847803177559e-06, + "loss": 0.5384, + "step": 8623 + }, + { + "epoch": 5.5674628792769525, + "grad_norm": 1.3142786702678468, + "learning_rate": 6.202448358980888e-06, + "loss": 0.4651, + "step": 8624 + }, + { + "epoch": 5.568108457069076, + "grad_norm": 1.4352705621744988, + "learning_rate": 6.184075590723042e-06, + "loss": 0.5178, + "step": 8625 + }, + { + "epoch": 5.5687540348612, + "grad_norm": 1.3273776173388983, + "learning_rate": 6.165729501812311e-06, + "loss": 0.4979, + "step": 8626 + }, + { + "epoch": 5.569399612653324, + "grad_norm": 1.3860787867003241, + "learning_rate": 6.147410095652106e-06, + "loss": 0.4579, + "step": 8627 + }, + { + "epoch": 5.570045190445448, + "grad_norm": 1.4170871264289686, + "learning_rate": 6.129117375640874e-06, + "loss": 0.5214, + "step": 8628 + }, + { + "epoch": 5.570690768237572, + "grad_norm": 1.5153453069959355, + "learning_rate": 6.110851345172102e-06, + "loss": 0.4954, + "step": 8629 + }, + { + "epoch": 5.571336346029697, + "grad_norm": 1.396606795724796, + "learning_rate": 6.0926120076343285e-06, + "loss": 0.4989, + "step": 8630 + }, + { + "epoch": 5.571981923821821, + "grad_norm": 1.3995130684680699, + "learning_rate": 6.0743993664111145e-06, + "loss": 0.5416, + "step": 8631 + }, + { + "epoch": 5.572627501613945, + "grad_norm": 1.4835120671214819, + "learning_rate": 6.05621342488114e-06, + "loss": 0.5312, + "step": 8632 + }, + { + "epoch": 5.573273079406069, + "grad_norm": 1.4306485340034367, + "learning_rate": 6.038054186418023e-06, + "loss": 0.472, + "step": 8633 + }, + { + "epoch": 5.573918657198193, + "grad_norm": 1.38446826642852, + "learning_rate": 6.019921654390586e-06, + "loss": 0.4577, + "step": 8634 + }, + { + "epoch": 5.574564234990317, + "grad_norm": 1.8388129495815801, + "learning_rate": 6.001815832162488e-06, + "loss": 0.4666, + "step": 8635 + }, + { + "epoch": 5.5752098127824405, + "grad_norm": 1.2825191485344531, + "learning_rate": 5.983736723092625e-06, + "loss": 0.4702, + "step": 
8636 + }, + { + "epoch": 5.5758553905745645, + "grad_norm": 1.4309941174688048, + "learning_rate": 5.965684330534865e-06, + "loss": 0.5087, + "step": 8637 + }, + { + "epoch": 5.576500968366688, + "grad_norm": 1.4915219947147873, + "learning_rate": 5.947658657838061e-06, + "loss": 0.499, + "step": 8638 + }, + { + "epoch": 5.577146546158812, + "grad_norm": 1.2361281100166106, + "learning_rate": 5.92965970834619e-06, + "loss": 0.4529, + "step": 8639 + }, + { + "epoch": 5.577792123950936, + "grad_norm": 1.4097937915705405, + "learning_rate": 5.911687485398265e-06, + "loss": 0.5007, + "step": 8640 + }, + { + "epoch": 5.57843770174306, + "grad_norm": 1.5123067140600868, + "learning_rate": 5.893741992328266e-06, + "loss": 0.5148, + "step": 8641 + }, + { + "epoch": 5.579083279535184, + "grad_norm": 1.2629542206687823, + "learning_rate": 5.875823232465315e-06, + "loss": 0.4661, + "step": 8642 + }, + { + "epoch": 5.579728857327308, + "grad_norm": 1.3945106230048403, + "learning_rate": 5.857931209133521e-06, + "loss": 0.5224, + "step": 8643 + }, + { + "epoch": 5.580374435119432, + "grad_norm": 1.2412184249311733, + "learning_rate": 5.840065925652027e-06, + "loss": 0.4219, + "step": 8644 + }, + { + "epoch": 5.581020012911556, + "grad_norm": 1.6545933248289617, + "learning_rate": 5.8222273853350325e-06, + "loss": 0.5953, + "step": 8645 + }, + { + "epoch": 5.58166559070368, + "grad_norm": 1.6400668174848005, + "learning_rate": 5.804415591491773e-06, + "loss": 0.5563, + "step": 8646 + }, + { + "epoch": 5.582311168495804, + "grad_norm": 1.4984854256703568, + "learning_rate": 5.786630547426524e-06, + "loss": 0.537, + "step": 8647 + }, + { + "epoch": 5.582956746287928, + "grad_norm": 1.5321401550108547, + "learning_rate": 5.7688722564385785e-06, + "loss": 0.6232, + "step": 8648 + }, + { + "epoch": 5.583602324080052, + "grad_norm": 1.3869332742207492, + "learning_rate": 5.751140721822317e-06, + "loss": 0.4671, + "step": 8649 + }, + { + "epoch": 5.584247901872176, + "grad_norm": 1.39220974941596, + "learning_rate": 5.73343594686711e-06, + "loss": 0.5204, + "step": 8650 + }, + { + "epoch": 5.5848934796642995, + "grad_norm": 1.7967405843139412, + "learning_rate": 5.715757934857362e-06, + "loss": 0.5263, + "step": 8651 + }, + { + "epoch": 5.585539057456423, + "grad_norm": 1.9538367441574729, + "learning_rate": 5.6981066890725345e-06, + "loss": 0.5525, + "step": 8652 + }, + { + "epoch": 5.586184635248547, + "grad_norm": 1.4081956085459748, + "learning_rate": 5.680482212787124e-06, + "loss": 0.5378, + "step": 8653 + }, + { + "epoch": 5.586830213040671, + "grad_norm": 1.3832565242703552, + "learning_rate": 5.662884509270649e-06, + "loss": 0.4834, + "step": 8654 + }, + { + "epoch": 5.587475790832795, + "grad_norm": 1.494058926734988, + "learning_rate": 5.6453135817876805e-06, + "loss": 0.5307, + "step": 8655 + }, + { + "epoch": 5.588121368624919, + "grad_norm": 1.3433286774608613, + "learning_rate": 5.627769433597778e-06, + "loss": 0.5089, + "step": 8656 + }, + { + "epoch": 5.588766946417043, + "grad_norm": 1.6374760094895155, + "learning_rate": 5.61025206795559e-06, + "loss": 0.4943, + "step": 8657 + }, + { + "epoch": 5.589412524209167, + "grad_norm": 1.8792644973459438, + "learning_rate": 5.592761488110731e-06, + "loss": 0.4514, + "step": 8658 + }, + { + "epoch": 5.590058102001291, + "grad_norm": 1.5476957724753535, + "learning_rate": 5.575297697307957e-06, + "loss": 0.4714, + "step": 8659 + }, + { + "epoch": 5.590703679793415, + "grad_norm": 1.3575701588144868, + "learning_rate": 5.557860698786909e-06, + "loss": 
0.4472, + "step": 8660 + }, + { + "epoch": 5.591349257585539, + "grad_norm": 1.376405465489789, + "learning_rate": 5.540450495782367e-06, + "loss": 0.4632, + "step": 8661 + }, + { + "epoch": 5.591994835377663, + "grad_norm": 1.5656196088774335, + "learning_rate": 5.52306709152413e-06, + "loss": 0.511, + "step": 8662 + }, + { + "epoch": 5.592640413169787, + "grad_norm": 1.4649962821284728, + "learning_rate": 5.50571048923692e-06, + "loss": 0.5533, + "step": 8663 + }, + { + "epoch": 5.593285990961911, + "grad_norm": 1.2727715723203101, + "learning_rate": 5.488380692140643e-06, + "loss": 0.4051, + "step": 8664 + }, + { + "epoch": 5.5939315687540345, + "grad_norm": 1.5226404384683192, + "learning_rate": 5.471077703450144e-06, + "loss": 0.5183, + "step": 8665 + }, + { + "epoch": 5.5945771465461585, + "grad_norm": 1.3529152298084421, + "learning_rate": 5.453801526375257e-06, + "loss": 0.4688, + "step": 8666 + }, + { + "epoch": 5.595222724338282, + "grad_norm": 1.356106756081655, + "learning_rate": 5.43655216412095e-06, + "loss": 0.494, + "step": 8667 + }, + { + "epoch": 5.595868302130407, + "grad_norm": 1.3465981931697508, + "learning_rate": 5.4193296198871306e-06, + "loss": 0.5033, + "step": 8668 + }, + { + "epoch": 5.596513879922531, + "grad_norm": 1.308383462073771, + "learning_rate": 5.40213389686876e-06, + "loss": 0.4869, + "step": 8669 + }, + { + "epoch": 5.597159457714655, + "grad_norm": 1.3569968153352776, + "learning_rate": 5.3849649982558365e-06, + "loss": 0.4794, + "step": 8670 + }, + { + "epoch": 5.597805035506779, + "grad_norm": 1.612008196135562, + "learning_rate": 5.367822927233361e-06, + "loss": 0.6047, + "step": 8671 + }, + { + "epoch": 5.598450613298903, + "grad_norm": 1.4294266968261924, + "learning_rate": 5.350707686981359e-06, + "loss": 0.5186, + "step": 8672 + }, + { + "epoch": 5.599096191091027, + "grad_norm": 1.552136601081148, + "learning_rate": 5.333619280674905e-06, + "loss": 0.545, + "step": 8673 + }, + { + "epoch": 5.599741768883151, + "grad_norm": 1.4829231924859936, + "learning_rate": 5.3165577114840485e-06, + "loss": 0.5054, + "step": 8674 + }, + { + "epoch": 5.600387346675275, + "grad_norm": 1.6013813834746733, + "learning_rate": 5.299522982573906e-06, + "loss": 0.56, + "step": 8675 + }, + { + "epoch": 5.601032924467399, + "grad_norm": 1.266180207630334, + "learning_rate": 5.282515097104601e-06, + "loss": 0.4373, + "step": 8676 + }, + { + "epoch": 5.601678502259523, + "grad_norm": 1.5548195507630707, + "learning_rate": 5.265534058231258e-06, + "loss": 0.5699, + "step": 8677 + }, + { + "epoch": 5.6023240800516465, + "grad_norm": 1.307480741908334, + "learning_rate": 5.248579869104042e-06, + "loss": 0.4738, + "step": 8678 + }, + { + "epoch": 5.60296965784377, + "grad_norm": 1.4199816599763564, + "learning_rate": 5.2316525328681355e-06, + "loss": 0.5141, + "step": 8679 + }, + { + "epoch": 5.603615235635894, + "grad_norm": 1.6166923030313998, + "learning_rate": 5.214752052663729e-06, + "loss": 0.4417, + "step": 8680 + }, + { + "epoch": 5.604260813428018, + "grad_norm": 1.3784386980087198, + "learning_rate": 5.1978784316260455e-06, + "loss": 0.5057, + "step": 8681 + }, + { + "epoch": 5.604906391220142, + "grad_norm": 1.4513993707541293, + "learning_rate": 5.1810316728853e-06, + "loss": 0.533, + "step": 8682 + }, + { + "epoch": 5.605551969012266, + "grad_norm": 1.3076478385817785, + "learning_rate": 5.1642117795667405e-06, + "loss": 0.4667, + "step": 8683 + }, + { + "epoch": 5.60619754680439, + "grad_norm": 1.3858889257598568, + "learning_rate": 5.147418754790672e-06, 
+ "loss": 0.4169, + "step": 8684 + }, + { + "epoch": 5.606843124596514, + "grad_norm": 1.4266287672396674, + "learning_rate": 5.130652601672319e-06, + "loss": 0.5113, + "step": 8685 + }, + { + "epoch": 5.607488702388638, + "grad_norm": 1.4225177944837601, + "learning_rate": 5.113913323322011e-06, + "loss": 0.476, + "step": 8686 + }, + { + "epoch": 5.608134280180762, + "grad_norm": 1.3839213570740618, + "learning_rate": 5.0972009228450795e-06, + "loss": 0.4776, + "step": 8687 + }, + { + "epoch": 5.608779857972886, + "grad_norm": 1.6254839840073063, + "learning_rate": 5.080515403341779e-06, + "loss": 0.5822, + "step": 8688 + }, + { + "epoch": 5.60942543576501, + "grad_norm": 1.2452317300503435, + "learning_rate": 5.063856767907515e-06, + "loss": 0.4049, + "step": 8689 + }, + { + "epoch": 5.610071013557134, + "grad_norm": 1.30729591941049, + "learning_rate": 5.047225019632617e-06, + "loss": 0.4998, + "step": 8690 + }, + { + "epoch": 5.610716591349258, + "grad_norm": 1.4360714823596794, + "learning_rate": 5.0306201616024e-06, + "loss": 0.5335, + "step": 8691 + }, + { + "epoch": 5.6113621691413815, + "grad_norm": 1.318265589906181, + "learning_rate": 5.0140421968973e-06, + "loss": 0.4744, + "step": 8692 + }, + { + "epoch": 5.6120077469335055, + "grad_norm": 1.449480820862921, + "learning_rate": 4.99749112859269e-06, + "loss": 0.4442, + "step": 8693 + }, + { + "epoch": 5.612653324725629, + "grad_norm": 1.308347398425542, + "learning_rate": 4.980966959758948e-06, + "loss": 0.4317, + "step": 8694 + }, + { + "epoch": 5.613298902517753, + "grad_norm": 1.2423846218933647, + "learning_rate": 4.96446969346147e-06, + "loss": 0.4297, + "step": 8695 + }, + { + "epoch": 5.613944480309877, + "grad_norm": 1.4222765762494964, + "learning_rate": 4.947999332760694e-06, + "loss": 0.4982, + "step": 8696 + }, + { + "epoch": 5.614590058102001, + "grad_norm": 1.3371951978335495, + "learning_rate": 4.93155588071204e-06, + "loss": 0.3743, + "step": 8697 + }, + { + "epoch": 5.615235635894125, + "grad_norm": 1.3655836237248886, + "learning_rate": 4.915139340365937e-06, + "loss": 0.5142, + "step": 8698 + }, + { + "epoch": 5.615881213686249, + "grad_norm": 1.3970674102698966, + "learning_rate": 4.898749714767813e-06, + "loss": 0.5547, + "step": 8699 + }, + { + "epoch": 5.616526791478373, + "grad_norm": 1.3807072375022016, + "learning_rate": 4.882387006958105e-06, + "loss": 0.4961, + "step": 8700 + }, + { + "epoch": 5.617172369270497, + "grad_norm": 1.7363600561841595, + "learning_rate": 4.8660512199723e-06, + "loss": 0.5186, + "step": 8701 + }, + { + "epoch": 5.617817947062621, + "grad_norm": 1.4898938038665845, + "learning_rate": 4.849742356840841e-06, + "loss": 0.5419, + "step": 8702 + }, + { + "epoch": 5.618463524854745, + "grad_norm": 1.6244997171826072, + "learning_rate": 4.8334604205891745e-06, + "loss": 0.6319, + "step": 8703 + }, + { + "epoch": 5.619109102646869, + "grad_norm": 1.3445112924814202, + "learning_rate": 4.817205414237785e-06, + "loss": 0.431, + "step": 8704 + }, + { + "epoch": 5.619754680438993, + "grad_norm": 1.4374322725335134, + "learning_rate": 4.8009773408021426e-06, + "loss": 0.4716, + "step": 8705 + }, + { + "epoch": 5.6204002582311166, + "grad_norm": 1.7421949235689083, + "learning_rate": 4.784776203292706e-06, + "loss": 0.5076, + "step": 8706 + }, + { + "epoch": 5.6210458360232405, + "grad_norm": 1.324816162350217, + "learning_rate": 4.768602004714989e-06, + "loss": 0.4786, + "step": 8707 + }, + { + "epoch": 5.621691413815364, + "grad_norm": 1.3382307007606893, + "learning_rate": 
4.752454748069439e-06, + "loss": 0.4659, + "step": 8708 + }, + { + "epoch": 5.622336991607488, + "grad_norm": 1.8852997341656694, + "learning_rate": 4.7363344363515795e-06, + "loss": 0.457, + "step": 8709 + }, + { + "epoch": 5.622982569399612, + "grad_norm": 1.448040877568718, + "learning_rate": 4.7202410725518335e-06, + "loss": 0.556, + "step": 8710 + }, + { + "epoch": 5.623628147191736, + "grad_norm": 1.4774343884148344, + "learning_rate": 4.704174659655763e-06, + "loss": 0.5394, + "step": 8711 + }, + { + "epoch": 5.62427372498386, + "grad_norm": 1.7676899683028562, + "learning_rate": 4.688135200643817e-06, + "loss": 0.5619, + "step": 8712 + }, + { + "epoch": 5.624919302775984, + "grad_norm": 1.4310894255166298, + "learning_rate": 4.672122698491448e-06, + "loss": 0.5304, + "step": 8713 + }, + { + "epoch": 5.625564880568108, + "grad_norm": 1.3215258389216116, + "learning_rate": 4.656137156169198e-06, + "loss": 0.4628, + "step": 8714 + }, + { + "epoch": 5.626210458360232, + "grad_norm": 1.2806387442581049, + "learning_rate": 4.640178576642545e-06, + "loss": 0.5024, + "step": 8715 + }, + { + "epoch": 5.626856036152357, + "grad_norm": 1.4412505546811072, + "learning_rate": 4.624246962871919e-06, + "loss": 0.4673, + "step": 8716 + }, + { + "epoch": 5.627501613944481, + "grad_norm": 1.5355983865996958, + "learning_rate": 4.608342317812841e-06, + "loss": 0.5072, + "step": 8717 + }, + { + "epoch": 5.628147191736605, + "grad_norm": 1.4844214021253814, + "learning_rate": 4.5924646444158e-06, + "loss": 0.5734, + "step": 8718 + }, + { + "epoch": 5.6287927695287285, + "grad_norm": 1.4144575979579685, + "learning_rate": 4.57661394562624e-06, + "loss": 0.5206, + "step": 8719 + }, + { + "epoch": 5.6294383473208525, + "grad_norm": 1.3055223085744196, + "learning_rate": 4.560790224384642e-06, + "loss": 0.4819, + "step": 8720 + }, + { + "epoch": 5.630083925112976, + "grad_norm": 1.558336766254925, + "learning_rate": 4.544993483626475e-06, + "loss": 0.638, + "step": 8721 + }, + { + "epoch": 5.6307295029051, + "grad_norm": 2.3976003014563516, + "learning_rate": 4.529223726282178e-06, + "loss": 0.4826, + "step": 8722 + }, + { + "epoch": 5.631375080697224, + "grad_norm": 1.351188870217632, + "learning_rate": 4.513480955277227e-06, + "loss": 0.5019, + "step": 8723 + }, + { + "epoch": 5.632020658489348, + "grad_norm": 1.302715624794523, + "learning_rate": 4.49776517353207e-06, + "loss": 0.4457, + "step": 8724 + }, + { + "epoch": 5.632666236281472, + "grad_norm": 1.7027843568323728, + "learning_rate": 4.482076383962141e-06, + "loss": 0.6342, + "step": 8725 + }, + { + "epoch": 5.633311814073596, + "grad_norm": 1.377528347657055, + "learning_rate": 4.4664145894778625e-06, + "loss": 0.5323, + "step": 8726 + }, + { + "epoch": 5.63395739186572, + "grad_norm": 1.3634193919421644, + "learning_rate": 4.450779792984677e-06, + "loss": 0.4749, + "step": 8727 + }, + { + "epoch": 5.634602969657844, + "grad_norm": 1.700295599185036, + "learning_rate": 4.43517199738298e-06, + "loss": 0.5395, + "step": 8728 + }, + { + "epoch": 5.635248547449968, + "grad_norm": 1.7230271237046708, + "learning_rate": 4.419591205568207e-06, + "loss": 0.5843, + "step": 8729 + }, + { + "epoch": 5.635894125242092, + "grad_norm": 1.2595506954604323, + "learning_rate": 4.4040374204307445e-06, + "loss": 0.4667, + "step": 8730 + }, + { + "epoch": 5.636539703034216, + "grad_norm": 1.7164004142125058, + "learning_rate": 4.388510644855986e-06, + "loss": 0.4941, + "step": 8731 + }, + { + "epoch": 5.63718528082634, + "grad_norm": 1.2512629447478012, + 
"learning_rate": 4.373010881724292e-06, + "loss": 0.4238, + "step": 8732 + }, + { + "epoch": 5.637830858618464, + "grad_norm": 1.6272274402444467, + "learning_rate": 4.357538133911032e-06, + "loss": 0.6044, + "step": 8733 + }, + { + "epoch": 5.6384764364105875, + "grad_norm": 1.3833182946065379, + "learning_rate": 4.3420924042865905e-06, + "loss": 0.5067, + "step": 8734 + }, + { + "epoch": 5.639122014202711, + "grad_norm": 2.243316522958746, + "learning_rate": 4.326673695716276e-06, + "loss": 0.5026, + "step": 8735 + }, + { + "epoch": 5.639767591994835, + "grad_norm": 1.5998107441702947, + "learning_rate": 4.311282011060435e-06, + "loss": 0.5741, + "step": 8736 + }, + { + "epoch": 5.640413169786959, + "grad_norm": 1.482849598541249, + "learning_rate": 4.295917353174416e-06, + "loss": 0.5528, + "step": 8737 + }, + { + "epoch": 5.641058747579083, + "grad_norm": 1.5113396036741733, + "learning_rate": 4.280579724908439e-06, + "loss": 0.541, + "step": 8738 + }, + { + "epoch": 5.641704325371207, + "grad_norm": 1.8020544570892154, + "learning_rate": 4.265269129107879e-06, + "loss": 0.5716, + "step": 8739 + }, + { + "epoch": 5.642349903163331, + "grad_norm": 1.493640605906499, + "learning_rate": 4.249985568612979e-06, + "loss": 0.5628, + "step": 8740 + }, + { + "epoch": 5.642995480955455, + "grad_norm": 1.5798928910184797, + "learning_rate": 4.234729046258972e-06, + "loss": 0.5463, + "step": 8741 + }, + { + "epoch": 5.643641058747579, + "grad_norm": 1.3081750258343168, + "learning_rate": 4.219499564876127e-06, + "loss": 0.4713, + "step": 8742 + }, + { + "epoch": 5.644286636539703, + "grad_norm": 1.4260456077497934, + "learning_rate": 4.204297127289685e-06, + "loss": 0.524, + "step": 8743 + }, + { + "epoch": 5.644932214331827, + "grad_norm": 1.4345101848532051, + "learning_rate": 4.189121736319823e-06, + "loss": 0.5594, + "step": 8744 + }, + { + "epoch": 5.645577792123951, + "grad_norm": 1.50256708956002, + "learning_rate": 4.173973394781755e-06, + "loss": 0.4608, + "step": 8745 + }, + { + "epoch": 5.646223369916075, + "grad_norm": 1.2725610974237984, + "learning_rate": 4.158852105485666e-06, + "loss": 0.4683, + "step": 8746 + }, + { + "epoch": 5.646868947708199, + "grad_norm": 1.4688978593166888, + "learning_rate": 4.143757871236681e-06, + "loss": 0.5355, + "step": 8747 + }, + { + "epoch": 5.6475145255003225, + "grad_norm": 1.2956138244667437, + "learning_rate": 4.128690694834957e-06, + "loss": 0.5271, + "step": 8748 + }, + { + "epoch": 5.648160103292446, + "grad_norm": 1.3273036643225335, + "learning_rate": 4.113650579075612e-06, + "loss": 0.5308, + "step": 8749 + }, + { + "epoch": 5.64880568108457, + "grad_norm": 1.6372706964540855, + "learning_rate": 4.098637526748727e-06, + "loss": 0.6954, + "step": 8750 + }, + { + "epoch": 5.649451258876694, + "grad_norm": 1.4439571638682511, + "learning_rate": 4.08365154063941e-06, + "loss": 0.5037, + "step": 8751 + }, + { + "epoch": 5.650096836668818, + "grad_norm": 1.4392577119028316, + "learning_rate": 4.068692623527686e-06, + "loss": 0.5017, + "step": 8752 + }, + { + "epoch": 5.650742414460942, + "grad_norm": 1.2704387456973552, + "learning_rate": 4.053760778188603e-06, + "loss": 0.4686, + "step": 8753 + }, + { + "epoch": 5.651387992253067, + "grad_norm": 1.3879885634015356, + "learning_rate": 4.038856007392161e-06, + "loss": 0.5171, + "step": 8754 + }, + { + "epoch": 5.652033570045191, + "grad_norm": 1.4204773306706737, + "learning_rate": 4.023978313903381e-06, + "loss": 0.49, + "step": 8755 + }, + { + "epoch": 5.652679147837315, + "grad_norm": 
1.5150784554314172, + "learning_rate": 4.00912770048219e-06, + "loss": 0.5015, + "step": 8756 + }, + { + "epoch": 5.653324725629439, + "grad_norm": 1.7336427456825751, + "learning_rate": 3.9943041698835665e-06, + "loss": 0.4555, + "step": 8757 + }, + { + "epoch": 5.653970303421563, + "grad_norm": 1.4037458672247713, + "learning_rate": 3.979507724857395e-06, + "loss": 0.4561, + "step": 8758 + }, + { + "epoch": 5.654615881213687, + "grad_norm": 1.3995938071773362, + "learning_rate": 3.9647383681486125e-06, + "loss": 0.4773, + "step": 8759 + }, + { + "epoch": 5.655261459005811, + "grad_norm": 1.3777315596399253, + "learning_rate": 3.949996102497044e-06, + "loss": 0.5275, + "step": 8760 + }, + { + "epoch": 5.6559070367979345, + "grad_norm": 1.4873634082876854, + "learning_rate": 3.935280930637552e-06, + "loss": 0.5793, + "step": 8761 + }, + { + "epoch": 5.656552614590058, + "grad_norm": 1.2933288142871329, + "learning_rate": 3.920592855299986e-06, + "loss": 0.4247, + "step": 8762 + }, + { + "epoch": 5.657198192382182, + "grad_norm": 1.37852968533898, + "learning_rate": 3.9059318792090675e-06, + "loss": 0.5047, + "step": 8763 + }, + { + "epoch": 5.657843770174306, + "grad_norm": 1.16358130431143, + "learning_rate": 3.891298005084603e-06, + "loss": 0.4282, + "step": 8764 + }, + { + "epoch": 5.65848934796643, + "grad_norm": 1.4089549797157397, + "learning_rate": 3.876691235641338e-06, + "loss": 0.5563, + "step": 8765 + }, + { + "epoch": 5.659134925758554, + "grad_norm": 1.3570499170809804, + "learning_rate": 3.862111573588939e-06, + "loss": 0.4822, + "step": 8766 + }, + { + "epoch": 5.659780503550678, + "grad_norm": 1.4176247518550973, + "learning_rate": 3.847559021632124e-06, + "loss": 0.4966, + "step": 8767 + }, + { + "epoch": 5.660426081342802, + "grad_norm": 1.3526809642631807, + "learning_rate": 3.833033582470535e-06, + "loss": 0.4849, + "step": 8768 + }, + { + "epoch": 5.661071659134926, + "grad_norm": 1.4520528082512365, + "learning_rate": 3.818535258798783e-06, + "loss": 0.4702, + "step": 8769 + }, + { + "epoch": 5.66171723692705, + "grad_norm": 1.498691172629447, + "learning_rate": 3.8040640533064494e-06, + "loss": 0.5668, + "step": 8770 + }, + { + "epoch": 5.662362814719174, + "grad_norm": 1.3392204195109443, + "learning_rate": 3.7896199686781038e-06, + "loss": 0.4992, + "step": 8771 + }, + { + "epoch": 5.663008392511298, + "grad_norm": 1.2407500428167257, + "learning_rate": 3.775203007593286e-06, + "loss": 0.4009, + "step": 8772 + }, + { + "epoch": 5.663653970303422, + "grad_norm": 1.5232440116681427, + "learning_rate": 3.760813172726457e-06, + "loss": 0.456, + "step": 8773 + }, + { + "epoch": 5.664299548095546, + "grad_norm": 1.2969859857538268, + "learning_rate": 3.746450466747114e-06, + "loss": 0.4389, + "step": 8774 + }, + { + "epoch": 5.6649451258876695, + "grad_norm": 1.3801058575861562, + "learning_rate": 3.7321148923196766e-06, + "loss": 0.4471, + "step": 8775 + }, + { + "epoch": 5.6655907036797934, + "grad_norm": 1.2990822112978482, + "learning_rate": 3.71780645210355e-06, + "loss": 0.503, + "step": 8776 + }, + { + "epoch": 5.666236281471917, + "grad_norm": 1.576868932148922, + "learning_rate": 3.7035251487530783e-06, + "loss": 0.5894, + "step": 8777 + }, + { + "epoch": 5.666881859264041, + "grad_norm": 1.4556121927334063, + "learning_rate": 3.689270984917625e-06, + "loss": 0.5104, + "step": 8778 + }, + { + "epoch": 5.667527437056165, + "grad_norm": 1.368024264685956, + "learning_rate": 3.675043963241442e-06, + "loss": 0.5175, + "step": 8779 + }, + { + "epoch": 
5.668173014848289, + "grad_norm": 1.4602682192865133, + "learning_rate": 3.660844086363818e-06, + "loss": 0.4983, + "step": 8780 + }, + { + "epoch": 5.668818592640413, + "grad_norm": 1.4161718220084698, + "learning_rate": 3.6466713569189787e-06, + "loss": 0.4345, + "step": 8781 + }, + { + "epoch": 5.669464170432537, + "grad_norm": 1.3952270439747916, + "learning_rate": 3.632525777536105e-06, + "loss": 0.4542, + "step": 8782 + }, + { + "epoch": 5.670109748224661, + "grad_norm": 1.369258286892698, + "learning_rate": 3.6184073508393306e-06, + "loss": 0.492, + "step": 8783 + }, + { + "epoch": 5.670755326016785, + "grad_norm": 1.5106207680878168, + "learning_rate": 3.604316079447811e-06, + "loss": 0.4061, + "step": 8784 + }, + { + "epoch": 5.671400903808909, + "grad_norm": 1.4531856953735909, + "learning_rate": 3.590251965975588e-06, + "loss": 0.4908, + "step": 8785 + }, + { + "epoch": 5.672046481601033, + "grad_norm": 1.4528724591415656, + "learning_rate": 3.576215013031708e-06, + "loss": 0.5139, + "step": 8786 + }, + { + "epoch": 5.672692059393157, + "grad_norm": 1.3336630494330741, + "learning_rate": 3.562205223220205e-06, + "loss": 0.5068, + "step": 8787 + }, + { + "epoch": 5.673337637185281, + "grad_norm": 1.5260552867935788, + "learning_rate": 3.5482225991399994e-06, + "loss": 0.5481, + "step": 8788 + }, + { + "epoch": 5.6739832149774045, + "grad_norm": 1.5207072206572494, + "learning_rate": 3.534267143385017e-06, + "loss": 0.4661, + "step": 8789 + }, + { + "epoch": 5.6746287927695285, + "grad_norm": 1.3795786942538557, + "learning_rate": 3.5203388585441692e-06, + "loss": 0.457, + "step": 8790 + }, + { + "epoch": 5.675274370561652, + "grad_norm": 1.3474077087180971, + "learning_rate": 3.5064377472012574e-06, + "loss": 0.4278, + "step": 8791 + }, + { + "epoch": 5.675919948353776, + "grad_norm": 1.2697239234750963, + "learning_rate": 3.4925638119351175e-06, + "loss": 0.4105, + "step": 8792 + }, + { + "epoch": 5.6765655261459, + "grad_norm": 1.1979529429748856, + "learning_rate": 3.478717055319491e-06, + "loss": 0.4573, + "step": 8793 + }, + { + "epoch": 5.677211103938024, + "grad_norm": 1.2775802666848475, + "learning_rate": 3.46489747992309e-06, + "loss": 0.4201, + "step": 8794 + }, + { + "epoch": 5.677856681730148, + "grad_norm": 1.6408094892586402, + "learning_rate": 3.451105088309597e-06, + "loss": 0.515, + "step": 8795 + }, + { + "epoch": 5.678502259522272, + "grad_norm": 1.429648618898782, + "learning_rate": 3.4373398830376485e-06, + "loss": 0.4219, + "step": 8796 + }, + { + "epoch": 5.679147837314396, + "grad_norm": 1.3415939856990684, + "learning_rate": 3.4236018666608355e-06, + "loss": 0.5634, + "step": 8797 + }, + { + "epoch": 5.67979341510652, + "grad_norm": 1.2888175807961952, + "learning_rate": 3.4098910417276858e-06, + "loss": 0.4525, + "step": 8798 + }, + { + "epoch": 5.680438992898644, + "grad_norm": 1.6069119404043468, + "learning_rate": 3.3962074107816985e-06, + "loss": 0.5172, + "step": 8799 + }, + { + "epoch": 5.681084570690768, + "grad_norm": 1.4514963138596775, + "learning_rate": 3.382550976361359e-06, + "loss": 0.5667, + "step": 8800 + }, + { + "epoch": 5.681730148482892, + "grad_norm": 1.933410167146575, + "learning_rate": 3.368921741000058e-06, + "loss": 0.513, + "step": 8801 + }, + { + "epoch": 5.6823757262750165, + "grad_norm": 1.4277353132022192, + "learning_rate": 3.355319707226156e-06, + "loss": 0.4937, + "step": 8802 + }, + { + "epoch": 5.6830213040671405, + "grad_norm": 1.2205422734969964, + "learning_rate": 3.3417448775629683e-06, + "loss": 0.3965, + "step": 
8803 + }, + { + "epoch": 5.683666881859264, + "grad_norm": 1.380960645772455, + "learning_rate": 3.3281972545287804e-06, + "loss": 0.4773, + "step": 8804 + }, + { + "epoch": 5.684312459651388, + "grad_norm": 1.5681423647415507, + "learning_rate": 3.314676840636815e-06, + "loss": 0.5977, + "step": 8805 + }, + { + "epoch": 5.684958037443512, + "grad_norm": 1.2756184232553132, + "learning_rate": 3.3011836383952493e-06, + "loss": 0.4657, + "step": 8806 + }, + { + "epoch": 5.685603615235636, + "grad_norm": 1.31735542327096, + "learning_rate": 3.2877176503071813e-06, + "loss": 0.4594, + "step": 8807 + }, + { + "epoch": 5.68624919302776, + "grad_norm": 1.517871174536917, + "learning_rate": 3.274278878870745e-06, + "loss": 0.5461, + "step": 8808 + }, + { + "epoch": 5.686894770819884, + "grad_norm": 1.3149138602710908, + "learning_rate": 3.2608673265789477e-06, + "loss": 0.5046, + "step": 8809 + }, + { + "epoch": 5.687540348612008, + "grad_norm": 1.3355893889596095, + "learning_rate": 3.247482995919748e-06, + "loss": 0.4297, + "step": 8810 + }, + { + "epoch": 5.688185926404132, + "grad_norm": 1.3665190589508749, + "learning_rate": 3.234125889376127e-06, + "loss": 0.4926, + "step": 8811 + }, + { + "epoch": 5.688831504196256, + "grad_norm": 1.3753295173900342, + "learning_rate": 3.2207960094259356e-06, + "loss": 0.4732, + "step": 8812 + }, + { + "epoch": 5.68947708198838, + "grad_norm": 1.5091014678263015, + "learning_rate": 3.207493358541996e-06, + "loss": 0.5419, + "step": 8813 + }, + { + "epoch": 5.690122659780504, + "grad_norm": 1.5039120099746806, + "learning_rate": 3.194217939192101e-06, + "loss": 0.3931, + "step": 8814 + }, + { + "epoch": 5.690768237572628, + "grad_norm": 1.5068322298616037, + "learning_rate": 3.1809697538389967e-06, + "loss": 0.6541, + "step": 8815 + }, + { + "epoch": 5.6914138153647515, + "grad_norm": 1.5422678505053795, + "learning_rate": 3.1677488049403177e-06, + "loss": 0.5732, + "step": 8816 + }, + { + "epoch": 5.6920593931568755, + "grad_norm": 1.4539166736127687, + "learning_rate": 3.1545550949487186e-06, + "loss": 0.499, + "step": 8817 + }, + { + "epoch": 5.692704970948999, + "grad_norm": 1.4712831374543491, + "learning_rate": 3.1413886263117748e-06, + "loss": 0.4741, + "step": 8818 + }, + { + "epoch": 5.693350548741123, + "grad_norm": 1.3500973915683878, + "learning_rate": 3.1282494014719828e-06, + "loss": 0.5372, + "step": 8819 + }, + { + "epoch": 5.693996126533247, + "grad_norm": 1.4252340870478715, + "learning_rate": 3.115137422866809e-06, + "loss": 0.5437, + "step": 8820 + }, + { + "epoch": 5.694641704325371, + "grad_norm": 1.2271439743471297, + "learning_rate": 3.1020526929286584e-06, + "loss": 0.4016, + "step": 8821 + }, + { + "epoch": 5.695287282117495, + "grad_norm": 1.3397814288134666, + "learning_rate": 3.088995214084905e-06, + "loss": 0.4658, + "step": 8822 + }, + { + "epoch": 5.695932859909619, + "grad_norm": 1.4000053087345607, + "learning_rate": 3.0759649887578284e-06, + "loss": 0.5261, + "step": 8823 + }, + { + "epoch": 5.696578437701743, + "grad_norm": 1.4862573370348948, + "learning_rate": 3.062962019364662e-06, + "loss": 0.5381, + "step": 8824 + }, + { + "epoch": 5.697224015493867, + "grad_norm": 1.538534984846967, + "learning_rate": 3.049986308317626e-06, + "loss": 0.4876, + "step": 8825 + }, + { + "epoch": 5.697869593285991, + "grad_norm": 1.5412192193418077, + "learning_rate": 3.0370378580238107e-06, + "loss": 0.5473, + "step": 8826 + }, + { + "epoch": 5.698515171078115, + "grad_norm": 1.6494497340893994, + "learning_rate": 
3.0241166708853126e-06, + "loss": 0.5129, + "step": 8827 + }, + { + "epoch": 5.699160748870239, + "grad_norm": 1.3394217576940879, + "learning_rate": 3.011222749299147e-06, + "loss": 0.4448, + "step": 8828 + }, + { + "epoch": 5.699806326662363, + "grad_norm": 1.504410767252545, + "learning_rate": 2.9983560956572506e-06, + "loss": 0.5028, + "step": 8829 + }, + { + "epoch": 5.700451904454487, + "grad_norm": 1.425931268681648, + "learning_rate": 2.9855167123465306e-06, + "loss": 0.5222, + "step": 8830 + }, + { + "epoch": 5.7010974822466105, + "grad_norm": 1.3904218683485965, + "learning_rate": 2.972704601748832e-06, + "loss": 0.5002, + "step": 8831 + }, + { + "epoch": 5.701743060038734, + "grad_norm": 1.5121153819134188, + "learning_rate": 2.95991976624092e-06, + "loss": 0.5314, + "step": 8832 + }, + { + "epoch": 5.702388637830858, + "grad_norm": 1.4224573931338995, + "learning_rate": 2.9471622081945313e-06, + "loss": 0.4914, + "step": 8833 + }, + { + "epoch": 5.703034215622982, + "grad_norm": 1.3059714130899982, + "learning_rate": 2.9344319299763388e-06, + "loss": 0.4298, + "step": 8834 + }, + { + "epoch": 5.703679793415106, + "grad_norm": 1.2277031319162908, + "learning_rate": 2.9217289339478868e-06, + "loss": 0.4331, + "step": 8835 + }, + { + "epoch": 5.70432537120723, + "grad_norm": 1.2944090045452263, + "learning_rate": 2.909053222465757e-06, + "loss": 0.4596, + "step": 8836 + }, + { + "epoch": 5.704970948999354, + "grad_norm": 1.2922110962165243, + "learning_rate": 2.8964047978814355e-06, + "loss": 0.4587, + "step": 8837 + }, + { + "epoch": 5.705616526791478, + "grad_norm": 8.071573640114892, + "learning_rate": 2.8837836625412948e-06, + "loss": 0.6926, + "step": 8838 + }, + { + "epoch": 5.706262104583602, + "grad_norm": 1.4465055197615897, + "learning_rate": 2.8711898187867287e-06, + "loss": 0.5191, + "step": 8839 + }, + { + "epoch": 5.706907682375727, + "grad_norm": 1.4887130736025875, + "learning_rate": 2.8586232689540024e-06, + "loss": 0.508, + "step": 8840 + }, + { + "epoch": 5.707553260167851, + "grad_norm": 1.4628436105691014, + "learning_rate": 2.846084015374317e-06, + "loss": 0.4413, + "step": 8841 + }, + { + "epoch": 5.708198837959975, + "grad_norm": 1.3784302262891839, + "learning_rate": 2.8335720603738953e-06, + "loss": 0.5232, + "step": 8842 + }, + { + "epoch": 5.7088444157520986, + "grad_norm": 1.5823244988344083, + "learning_rate": 2.8210874062737808e-06, + "loss": 0.4472, + "step": 8843 + }, + { + "epoch": 5.7094899935442225, + "grad_norm": 1.522063509097963, + "learning_rate": 2.8086300553900375e-06, + "loss": 0.5455, + "step": 8844 + }, + { + "epoch": 5.710135571336346, + "grad_norm": 2.0125759876524536, + "learning_rate": 2.796200010033617e-06, + "loss": 0.4762, + "step": 8845 + }, + { + "epoch": 5.71078114912847, + "grad_norm": 1.415190369153998, + "learning_rate": 2.783797272510424e-06, + "loss": 0.5023, + "step": 8846 + }, + { + "epoch": 5.711426726920594, + "grad_norm": 1.2134390680175067, + "learning_rate": 2.7714218451213186e-06, + "loss": 0.4388, + "step": 8847 + }, + { + "epoch": 5.712072304712718, + "grad_norm": 1.43175381650797, + "learning_rate": 2.7590737301620312e-06, + "loss": 0.5015, + "step": 8848 + }, + { + "epoch": 5.712717882504842, + "grad_norm": 1.3109384093959286, + "learning_rate": 2.746752929923296e-06, + "loss": 0.4513, + "step": 8849 + }, + { + "epoch": 5.713363460296966, + "grad_norm": 1.4349693480555261, + "learning_rate": 2.7344594466907345e-06, + "loss": 0.511, + "step": 8850 + }, + { + "epoch": 5.71400903808909, + "grad_norm": 
1.3659750156165136, + "learning_rate": 2.7221932827449222e-06, + "loss": 0.4772, + "step": 8851 + }, + { + "epoch": 5.714654615881214, + "grad_norm": 1.3904488380850568, + "learning_rate": 2.709954440361356e-06, + "loss": 0.493, + "step": 8852 + }, + { + "epoch": 5.715300193673338, + "grad_norm": 1.2784203961971794, + "learning_rate": 2.6977429218104527e-06, + "loss": 0.4564, + "step": 8853 + }, + { + "epoch": 5.715945771465462, + "grad_norm": 1.7811630948870403, + "learning_rate": 2.6855587293576176e-06, + "loss": 0.499, + "step": 8854 + }, + { + "epoch": 5.716591349257586, + "grad_norm": 1.2427241673377838, + "learning_rate": 2.6734018652631083e-06, + "loss": 0.4211, + "step": 8855 + }, + { + "epoch": 5.71723692704971, + "grad_norm": 1.5226798317297543, + "learning_rate": 2.6612723317821718e-06, + "loss": 0.4613, + "step": 8856 + }, + { + "epoch": 5.717882504841834, + "grad_norm": 1.3310319084012692, + "learning_rate": 2.649170131164924e-06, + "loss": 0.4954, + "step": 8857 + }, + { + "epoch": 5.7185280826339575, + "grad_norm": 1.59727151858932, + "learning_rate": 2.637095265656486e-06, + "loss": 0.5892, + "step": 8858 + }, + { + "epoch": 5.719173660426081, + "grad_norm": 1.577481247249527, + "learning_rate": 2.6250477374968828e-06, + "loss": 0.6145, + "step": 8859 + }, + { + "epoch": 5.719819238218205, + "grad_norm": 1.4477308540589005, + "learning_rate": 2.6130275489210096e-06, + "loss": 0.4908, + "step": 8860 + }, + { + "epoch": 5.720464816010329, + "grad_norm": 1.3969492864254773, + "learning_rate": 2.601034702158783e-06, + "loss": 0.5501, + "step": 8861 + }, + { + "epoch": 5.721110393802453, + "grad_norm": 1.3445605548883828, + "learning_rate": 2.589069199434973e-06, + "loss": 0.4371, + "step": 8862 + }, + { + "epoch": 5.721755971594577, + "grad_norm": 1.464006983775766, + "learning_rate": 2.5771310429693047e-06, + "loss": 0.5214, + "step": 8863 + }, + { + "epoch": 5.722401549386701, + "grad_norm": 1.4315462407157713, + "learning_rate": 2.5652202349764396e-06, + "loss": 0.4, + "step": 8864 + }, + { + "epoch": 5.723047127178825, + "grad_norm": 1.3503414089978478, + "learning_rate": 2.55333677766596e-06, + "loss": 0.4796, + "step": 8865 + }, + { + "epoch": 5.723692704970949, + "grad_norm": 1.4244860131099248, + "learning_rate": 2.5414806732423355e-06, + "loss": 0.4842, + "step": 8866 + }, + { + "epoch": 5.724338282763073, + "grad_norm": 1.47759829500058, + "learning_rate": 2.5296519239050406e-06, + "loss": 0.5118, + "step": 8867 + }, + { + "epoch": 5.724983860555197, + "grad_norm": 1.4456712208447178, + "learning_rate": 2.5178505318484033e-06, + "loss": 0.4635, + "step": 8868 + }, + { + "epoch": 5.725629438347321, + "grad_norm": 1.4491672487923037, + "learning_rate": 2.5060764992617387e-06, + "loss": 0.5425, + "step": 8869 + }, + { + "epoch": 5.726275016139445, + "grad_norm": 1.3574661272260464, + "learning_rate": 2.4943298283292e-06, + "loss": 0.5122, + "step": 8870 + }, + { + "epoch": 5.726920593931569, + "grad_norm": 1.2859853295124968, + "learning_rate": 2.4826105212299607e-06, + "loss": 0.4564, + "step": 8871 + }, + { + "epoch": 5.7275661717236925, + "grad_norm": 1.368554028474214, + "learning_rate": 2.470918580138065e-06, + "loss": 0.4841, + "step": 8872 + }, + { + "epoch": 5.7282117495158165, + "grad_norm": 1.409613621666091, + "learning_rate": 2.4592540072224775e-06, + "loss": 0.5103, + "step": 8873 + }, + { + "epoch": 5.72885732730794, + "grad_norm": 1.5383125122411874, + "learning_rate": 2.447616804647101e-06, + "loss": 0.5505, + "step": 8874 + }, + { + "epoch": 
5.729502905100064, + "grad_norm": 1.4776129148930432, + "learning_rate": 2.436006974570759e-06, + "loss": 0.5455, + "step": 8875 + }, + { + "epoch": 5.730148482892188, + "grad_norm": 1.4024534806782643, + "learning_rate": 2.4244245191471946e-06, + "loss": 0.3989, + "step": 8876 + }, + { + "epoch": 5.730794060684312, + "grad_norm": 1.5935446452940794, + "learning_rate": 2.412869440525089e-06, + "loss": 0.5191, + "step": 8877 + }, + { + "epoch": 5.731439638476436, + "grad_norm": 1.3871122156609796, + "learning_rate": 2.401341740847995e-06, + "loss": 0.5551, + "step": 8878 + }, + { + "epoch": 5.73208521626856, + "grad_norm": 1.2771917854423642, + "learning_rate": 2.3898414222544513e-06, + "loss": 0.4659, + "step": 8879 + }, + { + "epoch": 5.732730794060684, + "grad_norm": 1.3051851285593865, + "learning_rate": 2.3783684868778685e-06, + "loss": 0.444, + "step": 8880 + }, + { + "epoch": 5.733376371852808, + "grad_norm": 1.4097504365823652, + "learning_rate": 2.36692293684661e-06, + "loss": 0.491, + "step": 8881 + }, + { + "epoch": 5.734021949644932, + "grad_norm": 1.4251819992788288, + "learning_rate": 2.3555047742839283e-06, + "loss": 0.518, + "step": 8882 + }, + { + "epoch": 5.734667527437056, + "grad_norm": 1.7048598350328992, + "learning_rate": 2.344114001308028e-06, + "loss": 0.6598, + "step": 8883 + }, + { + "epoch": 5.73531310522918, + "grad_norm": 1.9101412467084198, + "learning_rate": 2.3327506200320034e-06, + "loss": 0.5476, + "step": 8884 + }, + { + "epoch": 5.735958683021304, + "grad_norm": 1.5844156061277483, + "learning_rate": 2.321414632563884e-06, + "loss": 0.5335, + "step": 8885 + }, + { + "epoch": 5.736604260813428, + "grad_norm": 1.3428855974842275, + "learning_rate": 2.3101060410066042e-06, + "loss": 0.4776, + "step": 8886 + }, + { + "epoch": 5.7372498386055515, + "grad_norm": 1.6332021646168167, + "learning_rate": 2.298824847458053e-06, + "loss": 0.6298, + "step": 8887 + }, + { + "epoch": 5.737895416397676, + "grad_norm": 1.4225813185650484, + "learning_rate": 2.2875710540109726e-06, + "loss": 0.5438, + "step": 8888 + }, + { + "epoch": 5.7385409941898, + "grad_norm": 1.5393250311463234, + "learning_rate": 2.276344662753077e-06, + "loss": 0.465, + "step": 8889 + }, + { + "epoch": 5.739186571981924, + "grad_norm": 1.7538624190075596, + "learning_rate": 2.2651456757669993e-06, + "loss": 0.4675, + "step": 8890 + }, + { + "epoch": 5.739832149774048, + "grad_norm": 1.3662267454229977, + "learning_rate": 2.253974095130212e-06, + "loss": 0.4433, + "step": 8891 + }, + { + "epoch": 5.740477727566172, + "grad_norm": 1.3726919373707598, + "learning_rate": 2.242829922915207e-06, + "loss": 0.4963, + "step": 8892 + }, + { + "epoch": 5.741123305358296, + "grad_norm": 1.4026025648443563, + "learning_rate": 2.23171316118933e-06, + "loss": 0.4852, + "step": 8893 + }, + { + "epoch": 5.74176888315042, + "grad_norm": 1.3217018182074332, + "learning_rate": 2.220623812014866e-06, + "loss": 0.4926, + "step": 8894 + }, + { + "epoch": 5.742414460942544, + "grad_norm": 1.3862621204894177, + "learning_rate": 2.2095618774489843e-06, + "loss": 0.4312, + "step": 8895 + }, + { + "epoch": 5.743060038734668, + "grad_norm": 1.4467890218509447, + "learning_rate": 2.1985273595437948e-06, + "loss": 0.4853, + "step": 8896 + }, + { + "epoch": 5.743705616526792, + "grad_norm": 1.4850870137761176, + "learning_rate": 2.187520260346326e-06, + "loss": 0.5094, + "step": 8897 + }, + { + "epoch": 5.744351194318916, + "grad_norm": 1.3751467685735117, + "learning_rate": 2.176540581898495e-06, + "loss": 0.4997, + "step": 
8898 + }, + { + "epoch": 5.7449967721110395, + "grad_norm": 1.4896005303592665, + "learning_rate": 2.1655883262371555e-06, + "loss": 0.5105, + "step": 8899 + }, + { + "epoch": 5.7456423499031635, + "grad_norm": 1.3687680657859558, + "learning_rate": 2.1546634953940657e-06, + "loss": 0.4551, + "step": 8900 + }, + { + "epoch": 5.746287927695287, + "grad_norm": 1.5001872647523018, + "learning_rate": 2.1437660913958876e-06, + "loss": 0.5127, + "step": 8901 + }, + { + "epoch": 5.746933505487411, + "grad_norm": 1.446414786684569, + "learning_rate": 2.1328961162642042e-06, + "loss": 0.4378, + "step": 8902 + }, + { + "epoch": 5.747579083279535, + "grad_norm": 1.3810177767693146, + "learning_rate": 2.122053572015503e-06, + "loss": 0.4901, + "step": 8903 + }, + { + "epoch": 5.748224661071659, + "grad_norm": 1.5867723483609912, + "learning_rate": 2.1112384606612074e-06, + "loss": 0.4708, + "step": 8904 + }, + { + "epoch": 5.748870238863783, + "grad_norm": 1.4624298818853074, + "learning_rate": 2.10045078420763e-06, + "loss": 0.5171, + "step": 8905 + }, + { + "epoch": 5.749515816655907, + "grad_norm": 1.4326462667414694, + "learning_rate": 2.08969054465597e-06, + "loss": 0.5296, + "step": 8906 + }, + { + "epoch": 5.750161394448031, + "grad_norm": 1.7534670849209109, + "learning_rate": 2.0789577440023797e-06, + "loss": 0.5617, + "step": 8907 + }, + { + "epoch": 5.750806972240155, + "grad_norm": 1.2605494978656984, + "learning_rate": 2.0682523842379174e-06, + "loss": 0.4478, + "step": 8908 + }, + { + "epoch": 5.751452550032279, + "grad_norm": 1.4906775740486315, + "learning_rate": 2.057574467348544e-06, + "loss": 0.5421, + "step": 8909 + }, + { + "epoch": 5.752098127824403, + "grad_norm": 1.3466615090617935, + "learning_rate": 2.0469239953150918e-06, + "loss": 0.4418, + "step": 8910 + }, + { + "epoch": 5.752743705616527, + "grad_norm": 1.426914643760507, + "learning_rate": 2.036300970113347e-06, + "loss": 0.5336, + "step": 8911 + }, + { + "epoch": 5.753389283408651, + "grad_norm": 1.6515972404337576, + "learning_rate": 2.0257053937140165e-06, + "loss": 0.5716, + "step": 8912 + }, + { + "epoch": 5.754034861200775, + "grad_norm": 1.3982473034109961, + "learning_rate": 2.015137268082645e-06, + "loss": 0.505, + "step": 8913 + }, + { + "epoch": 5.7546804389928985, + "grad_norm": 1.4567872615398119, + "learning_rate": 2.004596595179747e-06, + "loss": 0.5612, + "step": 8914 + }, + { + "epoch": 5.755326016785022, + "grad_norm": 1.4667978182791386, + "learning_rate": 1.994083376960759e-06, + "loss": 0.5267, + "step": 8915 + }, + { + "epoch": 5.755971594577146, + "grad_norm": 1.3789515029787245, + "learning_rate": 1.9835976153759538e-06, + "loss": 0.5059, + "step": 8916 + }, + { + "epoch": 5.75661717236927, + "grad_norm": 1.493071554700095, + "learning_rate": 1.9731393123705584e-06, + "loss": 0.4916, + "step": 8917 + }, + { + "epoch": 5.757262750161394, + "grad_norm": 1.4250009028532635, + "learning_rate": 1.9627084698846886e-06, + "loss": 0.4891, + "step": 8918 + }, + { + "epoch": 5.757908327953518, + "grad_norm": 1.606897875159425, + "learning_rate": 1.9523050898533955e-06, + "loss": 0.6208, + "step": 8919 + }, + { + "epoch": 5.758553905745642, + "grad_norm": 1.642473149120384, + "learning_rate": 1.9419291742066025e-06, + "loss": 0.513, + "step": 8920 + }, + { + "epoch": 5.759199483537766, + "grad_norm": 1.3858035147033037, + "learning_rate": 1.931580724869153e-06, + "loss": 0.4907, + "step": 8921 + }, + { + "epoch": 5.75984506132989, + "grad_norm": 1.3328576105620402, + "learning_rate": 1.9212597437607947e-06, 
+ "loss": 0.4886, + "step": 8922 + }, + { + "epoch": 5.760490639122014, + "grad_norm": 1.2822992458111109, + "learning_rate": 1.9109662327961626e-06, + "loss": 0.4641, + "step": 8923 + }, + { + "epoch": 5.761136216914138, + "grad_norm": 1.5801323051795966, + "learning_rate": 1.9007001938848132e-06, + "loss": 0.6124, + "step": 8924 + }, + { + "epoch": 5.761781794706262, + "grad_norm": 1.4136568493840735, + "learning_rate": 1.8904616289312224e-06, + "loss": 0.4845, + "step": 8925 + }, + { + "epoch": 5.7624273724983865, + "grad_norm": 1.3238033702947878, + "learning_rate": 1.8802505398347212e-06, + "loss": 0.4382, + "step": 8926 + }, + { + "epoch": 5.7630729502905105, + "grad_norm": 1.4079786019101797, + "learning_rate": 1.8700669284895941e-06, + "loss": 0.4768, + "step": 8927 + }, + { + "epoch": 5.763718528082634, + "grad_norm": 1.3189567550816195, + "learning_rate": 1.8599107967849968e-06, + "loss": 0.4275, + "step": 8928 + }, + { + "epoch": 5.764364105874758, + "grad_norm": 1.3891760798317674, + "learning_rate": 1.8497821466050057e-06, + "loss": 0.4902, + "step": 8929 + }, + { + "epoch": 5.765009683666882, + "grad_norm": 1.515837327288165, + "learning_rate": 1.8396809798285838e-06, + "loss": 0.5285, + "step": 8930 + }, + { + "epoch": 5.765655261459006, + "grad_norm": 1.6016153295003352, + "learning_rate": 1.8296072983295996e-06, + "loss": 0.5228, + "step": 8931 + }, + { + "epoch": 5.76630083925113, + "grad_norm": 1.401476014103217, + "learning_rate": 1.8195611039768244e-06, + "loss": 0.4471, + "step": 8932 + }, + { + "epoch": 5.766946417043254, + "grad_norm": 1.3354763600718236, + "learning_rate": 1.8095423986339509e-06, + "loss": 0.441, + "step": 8933 + }, + { + "epoch": 5.767591994835378, + "grad_norm": 1.7701543124130559, + "learning_rate": 1.7995511841595423e-06, + "loss": 0.5152, + "step": 8934 + }, + { + "epoch": 5.768237572627502, + "grad_norm": 1.38100505310959, + "learning_rate": 1.789587462407066e-06, + "loss": 0.4982, + "step": 8935 + }, + { + "epoch": 5.768883150419626, + "grad_norm": 1.752474130330619, + "learning_rate": 1.7796512352248937e-06, + "loss": 0.5265, + "step": 8936 + }, + { + "epoch": 5.76952872821175, + "grad_norm": 1.3206396904490731, + "learning_rate": 1.7697425044563174e-06, + "loss": 0.4563, + "step": 8937 + }, + { + "epoch": 5.770174306003874, + "grad_norm": 1.6897340331746382, + "learning_rate": 1.7598612719394668e-06, + "loss": 0.4356, + "step": 8938 + }, + { + "epoch": 5.770819883795998, + "grad_norm": 1.7617894666209442, + "learning_rate": 1.7500075395074753e-06, + "loss": 0.4922, + "step": 8939 + }, + { + "epoch": 5.771465461588122, + "grad_norm": 1.301137341520656, + "learning_rate": 1.7401813089882643e-06, + "loss": 0.4462, + "step": 8940 + }, + { + "epoch": 5.7721110393802455, + "grad_norm": 1.235146521450961, + "learning_rate": 1.7303825822047256e-06, + "loss": 0.3945, + "step": 8941 + }, + { + "epoch": 5.772756617172369, + "grad_norm": 1.3803886839000716, + "learning_rate": 1.7206113609746052e-06, + "loss": 0.5015, + "step": 8942 + }, + { + "epoch": 5.773402194964493, + "grad_norm": 1.4190263646222334, + "learning_rate": 1.7108676471105864e-06, + "loss": 0.4735, + "step": 8943 + }, + { + "epoch": 5.774047772756617, + "grad_norm": 1.3961844201406737, + "learning_rate": 1.7011514424202234e-06, + "loss": 0.5159, + "step": 8944 + }, + { + "epoch": 5.774693350548741, + "grad_norm": 1.2881088867932013, + "learning_rate": 1.6914627487059573e-06, + "loss": 0.5021, + "step": 8945 + }, + { + "epoch": 5.775338928340865, + "grad_norm": 1.7302985788686274, + 
"learning_rate": 1.6818015677651674e-06, + "loss": 0.5233, + "step": 8946 + }, + { + "epoch": 5.775984506132989, + "grad_norm": 1.465718902217023, + "learning_rate": 1.6721679013900868e-06, + "loss": 0.4474, + "step": 8947 + }, + { + "epoch": 5.776630083925113, + "grad_norm": 1.6183250748647788, + "learning_rate": 1.6625617513678525e-06, + "loss": 0.5683, + "step": 8948 + }, + { + "epoch": 5.777275661717237, + "grad_norm": 1.5676173941918643, + "learning_rate": 1.6529831194805387e-06, + "loss": 0.5451, + "step": 8949 + }, + { + "epoch": 5.777921239509361, + "grad_norm": 1.3872831601236284, + "learning_rate": 1.643432007505041e-06, + "loss": 0.5301, + "step": 8950 + }, + { + "epoch": 5.778566817301485, + "grad_norm": 1.3416973363430762, + "learning_rate": 1.6339084172132089e-06, + "loss": 0.4633, + "step": 8951 + }, + { + "epoch": 5.779212395093609, + "grad_norm": 1.3225217320365317, + "learning_rate": 1.624412350371762e-06, + "loss": 0.5495, + "step": 8952 + }, + { + "epoch": 5.779857972885733, + "grad_norm": 1.3960662477758607, + "learning_rate": 1.6149438087423417e-06, + "loss": 0.5215, + "step": 8953 + }, + { + "epoch": 5.780503550677857, + "grad_norm": 1.4241166103833194, + "learning_rate": 1.6055027940814258e-06, + "loss": 0.4747, + "step": 8954 + }, + { + "epoch": 5.7811491284699805, + "grad_norm": 1.452307680662201, + "learning_rate": 1.596089308140447e-06, + "loss": 0.5007, + "step": 8955 + }, + { + "epoch": 5.7817947062621045, + "grad_norm": 1.3787005240394354, + "learning_rate": 1.5867033526656748e-06, + "loss": 0.4115, + "step": 8956 + }, + { + "epoch": 5.782440284054228, + "grad_norm": 2.4185174224130503, + "learning_rate": 1.577344929398333e-06, + "loss": 0.4963, + "step": 8957 + }, + { + "epoch": 5.783085861846352, + "grad_norm": 1.3606848340478876, + "learning_rate": 1.5680140400745001e-06, + "loss": 0.5325, + "step": 8958 + }, + { + "epoch": 5.783731439638476, + "grad_norm": 1.5692124010619908, + "learning_rate": 1.5587106864251574e-06, + "loss": 0.6242, + "step": 8959 + }, + { + "epoch": 5.7843770174306, + "grad_norm": 1.3652685835817944, + "learning_rate": 1.549434870176125e-06, + "loss": 0.4831, + "step": 8960 + }, + { + "epoch": 5.785022595222724, + "grad_norm": 1.2208378816998273, + "learning_rate": 1.5401865930482426e-06, + "loss": 0.426, + "step": 8961 + }, + { + "epoch": 5.785668173014848, + "grad_norm": 1.5208917341358914, + "learning_rate": 1.5309658567571048e-06, + "loss": 0.5207, + "step": 8962 + }, + { + "epoch": 5.786313750806972, + "grad_norm": 1.4634790611955102, + "learning_rate": 1.52177266301326e-06, + "loss": 0.5514, + "step": 8963 + }, + { + "epoch": 5.786959328599096, + "grad_norm": 1.4141509564280001, + "learning_rate": 1.51260701352216e-06, + "loss": 0.4675, + "step": 8964 + }, + { + "epoch": 5.78760490639122, + "grad_norm": 1.5238898609842555, + "learning_rate": 1.5034689099841291e-06, + "loss": 0.5228, + "step": 8965 + }, + { + "epoch": 5.788250484183344, + "grad_norm": 1.4751839354565868, + "learning_rate": 1.4943583540943605e-06, + "loss": 0.531, + "step": 8966 + }, + { + "epoch": 5.788896061975468, + "grad_norm": 1.748859213775671, + "learning_rate": 1.4852753475429524e-06, + "loss": 0.4977, + "step": 8967 + }, + { + "epoch": 5.789541639767592, + "grad_norm": 1.441233776061698, + "learning_rate": 1.4762198920149238e-06, + "loss": 0.3953, + "step": 8968 + }, + { + "epoch": 5.7901872175597155, + "grad_norm": 1.3598422168672446, + "learning_rate": 1.4671919891901474e-06, + "loss": 0.4196, + "step": 8969 + }, + { + "epoch": 5.7908327953518395, + 
"grad_norm": 1.7848054399644064, + "learning_rate": 1.4581916407433837e-06, + "loss": 0.4509, + "step": 8970 + }, + { + "epoch": 5.791478373143963, + "grad_norm": 1.608803502075869, + "learning_rate": 1.4492188483442969e-06, + "loss": 0.5518, + "step": 8971 + }, + { + "epoch": 5.792123950936087, + "grad_norm": 1.4200050991287803, + "learning_rate": 1.440273613657439e-06, + "loss": 0.554, + "step": 8972 + }, + { + "epoch": 5.792769528728211, + "grad_norm": 1.2888715841214966, + "learning_rate": 1.4313559383422324e-06, + "loss": 0.4489, + "step": 8973 + }, + { + "epoch": 5.793415106520336, + "grad_norm": 1.6285717487200317, + "learning_rate": 1.4224658240529875e-06, + "loss": 0.576, + "step": 8974 + }, + { + "epoch": 5.79406068431246, + "grad_norm": 1.440707319998193, + "learning_rate": 1.4136032724389512e-06, + "loss": 0.5866, + "step": 8975 + }, + { + "epoch": 5.794706262104584, + "grad_norm": 1.4595751903583816, + "learning_rate": 1.4047682851441755e-06, + "loss": 0.4565, + "step": 8976 + }, + { + "epoch": 5.795351839896708, + "grad_norm": 1.421411806859764, + "learning_rate": 1.395960863807666e-06, + "loss": 0.4738, + "step": 8977 + }, + { + "epoch": 5.795997417688832, + "grad_norm": 1.410326939899613, + "learning_rate": 1.3871810100632985e-06, + "loss": 0.4222, + "step": 8978 + }, + { + "epoch": 5.796642995480956, + "grad_norm": 1.3407171671779907, + "learning_rate": 1.378428725539804e-06, + "loss": 0.4663, + "step": 8979 + }, + { + "epoch": 5.79728857327308, + "grad_norm": 1.3656563896911675, + "learning_rate": 1.3697040118608504e-06, + "loss": 0.5005, + "step": 8980 + }, + { + "epoch": 5.797934151065204, + "grad_norm": 2.078342853092212, + "learning_rate": 1.3610068706449261e-06, + "loss": 0.4963, + "step": 8981 + }, + { + "epoch": 5.7985797288573275, + "grad_norm": 1.477763253010102, + "learning_rate": 1.3523373035054574e-06, + "loss": 0.5352, + "step": 8982 + }, + { + "epoch": 5.7992253066494515, + "grad_norm": 1.2347730333210507, + "learning_rate": 1.3436953120507577e-06, + "loss": 0.4566, + "step": 8983 + }, + { + "epoch": 5.799870884441575, + "grad_norm": 1.2620487503257014, + "learning_rate": 1.3350808978839945e-06, + "loss": 0.46, + "step": 8984 + }, + { + "epoch": 5.800516462233699, + "grad_norm": 1.2578235231350194, + "learning_rate": 1.3264940626032228e-06, + "loss": 0.4692, + "step": 8985 + }, + { + "epoch": 5.801162040025823, + "grad_norm": 1.3924778084050742, + "learning_rate": 1.3179348078013851e-06, + "loss": 0.4842, + "step": 8986 + }, + { + "epoch": 5.801807617817947, + "grad_norm": 1.823253748825476, + "learning_rate": 1.3094031350663447e-06, + "loss": 0.5163, + "step": 8987 + }, + { + "epoch": 5.802453195610071, + "grad_norm": 1.3580943883436467, + "learning_rate": 1.3008990459807688e-06, + "loss": 0.4893, + "step": 8988 + }, + { + "epoch": 5.803098773402195, + "grad_norm": 1.9051452877914117, + "learning_rate": 1.2924225421222789e-06, + "loss": 0.5077, + "step": 8989 + }, + { + "epoch": 5.803744351194319, + "grad_norm": 1.4273385237388188, + "learning_rate": 1.283973625063367e-06, + "loss": 0.5453, + "step": 8990 + }, + { + "epoch": 5.804389928986443, + "grad_norm": 1.3338683285147976, + "learning_rate": 1.2755522963713793e-06, + "loss": 0.4633, + "step": 8991 + }, + { + "epoch": 5.805035506778567, + "grad_norm": 1.3965107942318287, + "learning_rate": 1.2671585576085496e-06, + "loss": 0.4426, + "step": 8992 + }, + { + "epoch": 5.805681084570691, + "grad_norm": 1.3465198981560476, + "learning_rate": 1.2587924103320324e-06, + "loss": 0.4717, + "step": 8993 + }, + { + 
"epoch": 5.806326662362815, + "grad_norm": 1.7513986574922717, + "learning_rate": 1.2504538560938026e-06, + "loss": 0.6723, + "step": 8994 + }, + { + "epoch": 5.806972240154939, + "grad_norm": 1.2739037781886249, + "learning_rate": 1.2421428964407732e-06, + "loss": 0.4332, + "step": 8995 + }, + { + "epoch": 5.8076178179470626, + "grad_norm": 1.5964203906696879, + "learning_rate": 1.2338595329146938e-06, + "loss": 0.5703, + "step": 8996 + }, + { + "epoch": 5.8082633957391865, + "grad_norm": 1.3560616228337932, + "learning_rate": 1.2256037670522356e-06, + "loss": 0.5168, + "step": 8997 + }, + { + "epoch": 5.80890897353131, + "grad_norm": 1.4573734920044283, + "learning_rate": 1.2173756003849233e-06, + "loss": 0.5535, + "step": 8998 + }, + { + "epoch": 5.809554551323434, + "grad_norm": 1.443379936996481, + "learning_rate": 1.2091750344391526e-06, + "loss": 0.471, + "step": 8999 + }, + { + "epoch": 5.810200129115558, + "grad_norm": 1.409379520290541, + "learning_rate": 1.2010020707362234e-06, + "loss": 0.5459, + "step": 9000 + }, + { + "epoch": 5.810845706907682, + "grad_norm": 1.2619855405739795, + "learning_rate": 1.1928567107923059e-06, + "loss": 0.4211, + "step": 9001 + }, + { + "epoch": 5.811491284699806, + "grad_norm": 1.461359151503603, + "learning_rate": 1.1847389561184418e-06, + "loss": 0.5266, + "step": 9002 + }, + { + "epoch": 5.81213686249193, + "grad_norm": 1.2824230516702553, + "learning_rate": 1.176648808220576e-06, + "loss": 0.5037, + "step": 9003 + }, + { + "epoch": 5.812782440284054, + "grad_norm": 1.53815566283455, + "learning_rate": 1.1685862685995084e-06, + "loss": 0.5463, + "step": 9004 + }, + { + "epoch": 5.813428018076178, + "grad_norm": 1.2887169072155273, + "learning_rate": 1.1605513387509091e-06, + "loss": 0.4833, + "step": 9005 + }, + { + "epoch": 5.814073595868302, + "grad_norm": 1.5129302363879813, + "learning_rate": 1.1525440201653525e-06, + "loss": 0.5385, + "step": 9006 + }, + { + "epoch": 5.814719173660426, + "grad_norm": 1.4040946210171206, + "learning_rate": 1.1445643143282667e-06, + "loss": 0.4698, + "step": 9007 + }, + { + "epoch": 5.81536475145255, + "grad_norm": 1.2521179395362292, + "learning_rate": 1.1366122227200014e-06, + "loss": 0.4491, + "step": 9008 + }, + { + "epoch": 5.816010329244674, + "grad_norm": 1.3883032450199768, + "learning_rate": 1.1286877468157263e-06, + "loss": 0.4441, + "step": 9009 + }, + { + "epoch": 5.816655907036798, + "grad_norm": 1.53458305413822, + "learning_rate": 1.1207908880855155e-06, + "loss": 0.5759, + "step": 9010 + }, + { + "epoch": 5.8173014848289215, + "grad_norm": 1.489366159008359, + "learning_rate": 1.1129216479943303e-06, + "loss": 0.5242, + "step": 9011 + }, + { + "epoch": 5.817947062621046, + "grad_norm": 1.5203085005316619, + "learning_rate": 1.1050800280019867e-06, + "loss": 0.5301, + "step": 9012 + }, + { + "epoch": 5.81859264041317, + "grad_norm": 1.45356889416264, + "learning_rate": 1.0972660295631875e-06, + "loss": 0.5028, + "step": 9013 + }, + { + "epoch": 5.819238218205294, + "grad_norm": 1.3655102655097173, + "learning_rate": 1.0894796541275064e-06, + "loss": 0.5166, + "step": 9014 + }, + { + "epoch": 5.819883795997418, + "grad_norm": 1.5835193875389675, + "learning_rate": 1.0817209031394048e-06, + "loss": 0.5249, + "step": 9015 + }, + { + "epoch": 5.820529373789542, + "grad_norm": 1.377680938019602, + "learning_rate": 1.0739897780382145e-06, + "loss": 0.4953, + "step": 9016 + }, + { + "epoch": 5.821174951581666, + "grad_norm": 1.2210633702516103, + "learning_rate": 1.0662862802581384e-06, + "loss": 
0.4381, + "step": 9017 + }, + { + "epoch": 5.82182052937379, + "grad_norm": 1.2147669983117442, + "learning_rate": 1.0586104112282333e-06, + "loss": 0.3952, + "step": 9018 + }, + { + "epoch": 5.822466107165914, + "grad_norm": 1.4089965234838586, + "learning_rate": 1.050962172372477e-06, + "loss": 0.4935, + "step": 9019 + }, + { + "epoch": 5.823111684958038, + "grad_norm": 1.2791204603250994, + "learning_rate": 1.043341565109701e-06, + "loss": 0.3912, + "step": 9020 + }, + { + "epoch": 5.823757262750162, + "grad_norm": 1.3570034847678274, + "learning_rate": 1.0357485908535913e-06, + "loss": 0.42, + "step": 9021 + }, + { + "epoch": 5.824402840542286, + "grad_norm": 1.4768429880919454, + "learning_rate": 1.0281832510127208e-06, + "loss": 0.514, + "step": 9022 + }, + { + "epoch": 5.82504841833441, + "grad_norm": 1.367906802284406, + "learning_rate": 1.0206455469905672e-06, + "loss": 0.4946, + "step": 9023 + }, + { + "epoch": 5.8256939961265335, + "grad_norm": 1.2826073284377655, + "learning_rate": 1.013135480185412e-06, + "loss": 0.4432, + "step": 9024 + }, + { + "epoch": 5.826339573918657, + "grad_norm": 1.370401836783844, + "learning_rate": 1.0056530519904738e-06, + "loss": 0.5078, + "step": 9025 + }, + { + "epoch": 5.826985151710781, + "grad_norm": 1.4341885537913348, + "learning_rate": 9.981982637938257e-07, + "loss": 0.5214, + "step": 9026 + }, + { + "epoch": 5.827630729502905, + "grad_norm": 1.4857964671094306, + "learning_rate": 9.90771116978395e-07, + "loss": 0.5669, + "step": 9027 + }, + { + "epoch": 5.828276307295029, + "grad_norm": 1.600848265758487, + "learning_rate": 9.83371612921996e-07, + "loss": 0.5302, + "step": 9028 + }, + { + "epoch": 5.828921885087153, + "grad_norm": 1.449783590534363, + "learning_rate": 9.759997529973307e-07, + "loss": 0.5685, + "step": 9029 + }, + { + "epoch": 5.829567462879277, + "grad_norm": 1.3143889753532723, + "learning_rate": 9.68655538571922e-07, + "loss": 0.438, + "step": 9030 + }, + { + "epoch": 5.830213040671401, + "grad_norm": 1.2343070758750299, + "learning_rate": 9.613389710082298e-07, + "loss": 0.428, + "step": 9031 + }, + { + "epoch": 5.830858618463525, + "grad_norm": 1.4281847128992622, + "learning_rate": 9.540500516635352e-07, + "loss": 0.4987, + "step": 9032 + }, + { + "epoch": 5.831504196255649, + "grad_norm": 1.3207505100627437, + "learning_rate": 9.467887818900233e-07, + "loss": 0.4987, + "step": 9033 + }, + { + "epoch": 5.832149774047773, + "grad_norm": 1.3080508451683985, + "learning_rate": 9.395551630347331e-07, + "loss": 0.4432, + "step": 9034 + }, + { + "epoch": 5.832795351839897, + "grad_norm": 1.4350781139883264, + "learning_rate": 9.32349196439558e-07, + "loss": 0.5239, + "step": 9035 + }, + { + "epoch": 5.833440929632021, + "grad_norm": 1.3584769054948571, + "learning_rate": 9.251708834412952e-07, + "loss": 0.4917, + "step": 9036 + }, + { + "epoch": 5.834086507424145, + "grad_norm": 1.4532539585964075, + "learning_rate": 9.180202253715962e-07, + "loss": 0.4973, + "step": 9037 + }, + { + "epoch": 5.8347320852162685, + "grad_norm": 1.2699057906179958, + "learning_rate": 9.108972235569667e-07, + "loss": 0.3981, + "step": 9038 + }, + { + "epoch": 5.8353776630083924, + "grad_norm": 1.458734387441992, + "learning_rate": 9.038018793188162e-07, + "loss": 0.5294, + "step": 9039 + }, + { + "epoch": 5.836023240800516, + "grad_norm": 1.4062038154616583, + "learning_rate": 8.967341939734085e-07, + "loss": 0.4804, + "step": 9040 + }, + { + "epoch": 5.83666881859264, + "grad_norm": 1.61183481891085, + "learning_rate": 8.896941688318449e-07, 
+ "loss": 0.6196, + "step": 9041 + }, + { + "epoch": 5.837314396384764, + "grad_norm": 1.2950805057725852, + "learning_rate": 8.826818052001472e-07, + "loss": 0.441, + "step": 9042 + }, + { + "epoch": 5.837959974176888, + "grad_norm": 1.479348855124465, + "learning_rate": 8.75697104379175e-07, + "loss": 0.5362, + "step": 9043 + }, + { + "epoch": 5.838605551969012, + "grad_norm": 1.459744367631925, + "learning_rate": 8.687400676646583e-07, + "loss": 0.5458, + "step": 9044 + }, + { + "epoch": 5.839251129761136, + "grad_norm": 1.4359794084946702, + "learning_rate": 8.618106963472149e-07, + "loss": 0.5022, + "step": 9045 + }, + { + "epoch": 5.83989670755326, + "grad_norm": 1.2975476064887297, + "learning_rate": 8.549089917122831e-07, + "loss": 0.474, + "step": 9046 + }, + { + "epoch": 5.840542285345384, + "grad_norm": 1.399480697888797, + "learning_rate": 8.480349550402388e-07, + "loss": 0.4906, + "step": 9047 + }, + { + "epoch": 5.841187863137508, + "grad_norm": 1.4554677223585752, + "learning_rate": 8.411885876062618e-07, + "loss": 0.5842, + "step": 9048 + }, + { + "epoch": 5.841833440929632, + "grad_norm": 1.3176655435179483, + "learning_rate": 8.343698906804363e-07, + "loss": 0.4254, + "step": 9049 + }, + { + "epoch": 5.842479018721756, + "grad_norm": 1.1845536131911287, + "learning_rate": 8.275788655277005e-07, + "loss": 0.3904, + "step": 9050 + }, + { + "epoch": 5.84312459651388, + "grad_norm": 1.640827408262145, + "learning_rate": 8.208155134078631e-07, + "loss": 0.5167, + "step": 9051 + }, + { + "epoch": 5.8437701743060035, + "grad_norm": 1.3693364877890934, + "learning_rate": 8.140798355755706e-07, + "loss": 0.4545, + "step": 9052 + }, + { + "epoch": 5.8444157520981275, + "grad_norm": 1.2674973880730052, + "learning_rate": 8.073718332804069e-07, + "loss": 0.4466, + "step": 9053 + }, + { + "epoch": 5.845061329890251, + "grad_norm": 1.629429497992039, + "learning_rate": 8.006915077667264e-07, + "loss": 0.5184, + "step": 9054 + }, + { + "epoch": 5.845706907682375, + "grad_norm": 1.7088545823181844, + "learning_rate": 7.940388602738379e-07, + "loss": 0.4942, + "step": 9055 + }, + { + "epoch": 5.846352485474499, + "grad_norm": 1.4778778512645934, + "learning_rate": 7.874138920358541e-07, + "loss": 0.609, + "step": 9056 + }, + { + "epoch": 5.846998063266623, + "grad_norm": 1.4061732665061786, + "learning_rate": 7.808166042817753e-07, + "loss": 0.4859, + "step": 9057 + }, + { + "epoch": 5.847643641058747, + "grad_norm": 1.4255901818669445, + "learning_rate": 7.742469982354727e-07, + "loss": 0.5366, + "step": 9058 + }, + { + "epoch": 5.848289218850871, + "grad_norm": 1.384210350526461, + "learning_rate": 7.677050751157043e-07, + "loss": 0.4443, + "step": 9059 + }, + { + "epoch": 5.848934796642995, + "grad_norm": 1.4309797146473189, + "learning_rate": 7.611908361360164e-07, + "loss": 0.4309, + "step": 9060 + }, + { + "epoch": 5.84958037443512, + "grad_norm": 1.430473521444103, + "learning_rate": 7.547042825048921e-07, + "loss": 0.4262, + "step": 9061 + }, + { + "epoch": 5.850225952227244, + "grad_norm": 1.3413190131877546, + "learning_rate": 7.482454154256524e-07, + "loss": 0.4511, + "step": 9062 + }, + { + "epoch": 5.850871530019368, + "grad_norm": 1.5734499475541686, + "learning_rate": 7.418142360964885e-07, + "loss": 0.4927, + "step": 9063 + }, + { + "epoch": 5.851517107811492, + "grad_norm": 1.3654392771333628, + "learning_rate": 7.354107457104297e-07, + "loss": 0.5049, + "step": 9064 + }, + { + "epoch": 5.8521626856036155, + "grad_norm": 1.4927837708446878, + "learning_rate": 
7.290349454554256e-07, + "loss": 0.5069, + "step": 9065 + }, + { + "epoch": 5.8528082633957395, + "grad_norm": 1.3282060093789716, + "learning_rate": 7.226868365142302e-07, + "loss": 0.4437, + "step": 9066 + }, + { + "epoch": 5.853453841187863, + "grad_norm": 1.461921029249473, + "learning_rate": 7.163664200644847e-07, + "loss": 0.4915, + "step": 9067 + }, + { + "epoch": 5.854099418979987, + "grad_norm": 1.8935424624488697, + "learning_rate": 7.100736972787013e-07, + "loss": 0.4391, + "step": 9068 + }, + { + "epoch": 5.854744996772111, + "grad_norm": 1.6113175065599092, + "learning_rate": 7.038086693242295e-07, + "loss": 0.5614, + "step": 9069 + }, + { + "epoch": 5.855390574564235, + "grad_norm": 1.4890115467859673, + "learning_rate": 6.975713373633063e-07, + "loss": 0.5306, + "step": 9070 + }, + { + "epoch": 5.856036152356359, + "grad_norm": 1.2910770099969189, + "learning_rate": 6.913617025530394e-07, + "loss": 0.4492, + "step": 9071 + }, + { + "epoch": 5.856681730148483, + "grad_norm": 1.4800595857988905, + "learning_rate": 6.851797660453406e-07, + "loss": 0.5008, + "step": 9072 + }, + { + "epoch": 5.857327307940607, + "grad_norm": 1.3065847948803833, + "learning_rate": 6.790255289870594e-07, + "loss": 0.4216, + "step": 9073 + }, + { + "epoch": 5.857972885732731, + "grad_norm": 1.2978941117942335, + "learning_rate": 6.728989925198491e-07, + "loss": 0.4226, + "step": 9074 + }, + { + "epoch": 5.858618463524855, + "grad_norm": 1.1469530457828385, + "learning_rate": 6.668001577802673e-07, + "loss": 0.3885, + "step": 9075 + }, + { + "epoch": 5.859264041316979, + "grad_norm": 1.4312959317236096, + "learning_rate": 6.607290258996755e-07, + "loss": 0.52, + "step": 9076 + }, + { + "epoch": 5.859909619109103, + "grad_norm": 1.5584541105929441, + "learning_rate": 6.54685598004373e-07, + "loss": 0.5548, + "step": 9077 + }, + { + "epoch": 5.860555196901227, + "grad_norm": 1.49339089815921, + "learning_rate": 6.486698752154462e-07, + "loss": 0.5096, + "step": 9078 + }, + { + "epoch": 5.8612007746933505, + "grad_norm": 1.354737665960649, + "learning_rate": 6.426818586489025e-07, + "loss": 0.3978, + "step": 9079 + }, + { + "epoch": 5.8618463524854745, + "grad_norm": 1.559024410379652, + "learning_rate": 6.367215494155531e-07, + "loss": 0.4643, + "step": 9080 + }, + { + "epoch": 5.862491930277598, + "grad_norm": 1.2711855374043661, + "learning_rate": 6.307889486211137e-07, + "loss": 0.4519, + "step": 9081 + }, + { + "epoch": 5.863137508069722, + "grad_norm": 1.3354401267134128, + "learning_rate": 6.248840573661373e-07, + "loss": 0.4678, + "step": 9082 + }, + { + "epoch": 5.863783085861846, + "grad_norm": 1.408859516592183, + "learning_rate": 6.190068767460476e-07, + "loss": 0.5049, + "step": 9083 + }, + { + "epoch": 5.86442866365397, + "grad_norm": 1.5880577176612416, + "learning_rate": 6.131574078511225e-07, + "loss": 0.4892, + "step": 9084 + }, + { + "epoch": 5.865074241446094, + "grad_norm": 1.5228496557387223, + "learning_rate": 6.07335651766494e-07, + "loss": 0.558, + "step": 9085 + }, + { + "epoch": 5.865719819238218, + "grad_norm": 1.8655178640585024, + "learning_rate": 6.015416095721481e-07, + "loss": 0.5131, + "step": 9086 + }, + { + "epoch": 5.866365397030342, + "grad_norm": 1.2796892026316862, + "learning_rate": 5.957752823429751e-07, + "loss": 0.4561, + "step": 9087 + }, + { + "epoch": 5.867010974822466, + "grad_norm": 1.5034442930861156, + "learning_rate": 5.900366711486526e-07, + "loss": 0.4447, + "step": 9088 + }, + { + "epoch": 5.86765655261459, + "grad_norm": 1.245587131570389, + 
"learning_rate": 5.843257770537624e-07, + "loss": 0.438, + "step": 9089 + }, + { + "epoch": 5.868302130406714, + "grad_norm": 1.6165356370085913, + "learning_rate": 5.786426011177403e-07, + "loss": 0.6459, + "step": 9090 + }, + { + "epoch": 5.868947708198838, + "grad_norm": 1.414993807509133, + "learning_rate": 5.729871443948764e-07, + "loss": 0.4759, + "step": 9091 + }, + { + "epoch": 5.869593285990962, + "grad_norm": 1.3844405781307754, + "learning_rate": 5.673594079343147e-07, + "loss": 0.4687, + "step": 9092 + }, + { + "epoch": 5.870238863783086, + "grad_norm": 1.4292573308144403, + "learning_rate": 5.617593927800534e-07, + "loss": 0.4259, + "step": 9093 + }, + { + "epoch": 5.8708844415752095, + "grad_norm": 1.4837393578963423, + "learning_rate": 5.561870999709617e-07, + "loss": 0.5024, + "step": 9094 + }, + { + "epoch": 5.871530019367333, + "grad_norm": 1.3402029089789862, + "learning_rate": 5.506425305407458e-07, + "loss": 0.4324, + "step": 9095 + }, + { + "epoch": 5.872175597159457, + "grad_norm": 1.499243357772558, + "learning_rate": 5.451256855179997e-07, + "loss": 0.5363, + "step": 9096 + }, + { + "epoch": 5.872821174951581, + "grad_norm": 1.5535181053639129, + "learning_rate": 5.396365659261382e-07, + "loss": 0.5501, + "step": 9097 + }, + { + "epoch": 5.873466752743706, + "grad_norm": 1.3659129502299883, + "learning_rate": 5.341751727834464e-07, + "loss": 0.5323, + "step": 9098 + }, + { + "epoch": 5.87411233053583, + "grad_norm": 1.4392476822455051, + "learning_rate": 5.28741507103081e-07, + "loss": 0.5314, + "step": 9099 + }, + { + "epoch": 5.874757908327954, + "grad_norm": 1.38220610854073, + "learning_rate": 5.233355698930519e-07, + "loss": 0.4656, + "step": 9100 + }, + { + "epoch": 5.875403486120078, + "grad_norm": 1.2994672010497599, + "learning_rate": 5.179573621561905e-07, + "loss": 0.4506, + "step": 9101 + }, + { + "epoch": 5.876049063912202, + "grad_norm": 1.6392203005747241, + "learning_rate": 5.12606884890232e-07, + "loss": 0.5237, + "step": 9102 + }, + { + "epoch": 5.876694641704326, + "grad_norm": 1.6140724677562708, + "learning_rate": 5.072841390877324e-07, + "loss": 0.5555, + "step": 9103 + }, + { + "epoch": 5.87734021949645, + "grad_norm": 1.5146934087314496, + "learning_rate": 5.019891257361352e-07, + "loss": 0.5474, + "step": 9104 + }, + { + "epoch": 5.877985797288574, + "grad_norm": 1.3753029608123548, + "learning_rate": 4.967218458177047e-07, + "loss": 0.4915, + "step": 9105 + }, + { + "epoch": 5.8786313750806976, + "grad_norm": 1.409970430609157, + "learning_rate": 4.914823003095758e-07, + "loss": 0.4965, + "step": 9106 + }, + { + "epoch": 5.8792769528728215, + "grad_norm": 1.3518622473048505, + "learning_rate": 4.862704901837211e-07, + "loss": 0.4796, + "step": 9107 + }, + { + "epoch": 5.879922530664945, + "grad_norm": 1.4047365316350096, + "learning_rate": 4.810864164070338e-07, + "loss": 0.5902, + "step": 9108 + }, + { + "epoch": 5.880568108457069, + "grad_norm": 1.4547808978669343, + "learning_rate": 4.7593007994117803e-07, + "loss": 0.5235, + "step": 9109 + }, + { + "epoch": 5.881213686249193, + "grad_norm": 1.3340773908237766, + "learning_rate": 4.7080148174272193e-07, + "loss": 0.4576, + "step": 9110 + }, + { + "epoch": 5.881859264041317, + "grad_norm": 1.2682420886012726, + "learning_rate": 4.657006227630544e-07, + "loss": 0.4284, + "step": 9111 + }, + { + "epoch": 5.882504841833441, + "grad_norm": 1.3648362561366132, + "learning_rate": 4.606275039484686e-07, + "loss": 0.4487, + "step": 9112 + }, + { + "epoch": 5.883150419625565, + "grad_norm": 
1.4005563454249543, + "learning_rate": 4.55582126240045e-07, + "loss": 0.4894, + "step": 9113 + }, + { + "epoch": 5.883795997417689, + "grad_norm": 1.4792922359636866, + "learning_rate": 4.505644905737682e-07, + "loss": 0.5898, + "step": 9114 + }, + { + "epoch": 5.884441575209813, + "grad_norm": 1.3259640033804485, + "learning_rate": 4.45574597880477e-07, + "loss": 0.4276, + "step": 9115 + }, + { + "epoch": 5.885087153001937, + "grad_norm": 1.3462760083041196, + "learning_rate": 4.406124490858309e-07, + "loss": 0.4539, + "step": 9116 + }, + { + "epoch": 5.885732730794061, + "grad_norm": 1.6715777331000286, + "learning_rate": 4.3567804511037697e-07, + "loss": 0.6326, + "step": 9117 + }, + { + "epoch": 5.886378308586185, + "grad_norm": 1.357248062469381, + "learning_rate": 4.3077138686946623e-07, + "loss": 0.4766, + "step": 9118 + }, + { + "epoch": 5.887023886378309, + "grad_norm": 1.3770462713802682, + "learning_rate": 4.2589247527337056e-07, + "loss": 0.5452, + "step": 9119 + }, + { + "epoch": 5.887669464170433, + "grad_norm": 1.2767636535793707, + "learning_rate": 4.21041311227166e-07, + "loss": 0.4516, + "step": 9120 + }, + { + "epoch": 5.8883150419625565, + "grad_norm": 1.224698928201583, + "learning_rate": 4.162178956307827e-07, + "loss": 0.4394, + "step": 9121 + }, + { + "epoch": 5.88896061975468, + "grad_norm": 1.7240111571687202, + "learning_rate": 4.1142222937902146e-07, + "loss": 0.5256, + "step": 9122 + }, + { + "epoch": 5.889606197546804, + "grad_norm": 1.3063879517976922, + "learning_rate": 4.066543133615208e-07, + "loss": 0.4302, + "step": 9123 + }, + { + "epoch": 5.890251775338928, + "grad_norm": 1.4028413321003526, + "learning_rate": 4.0191414846280635e-07, + "loss": 0.4865, + "step": 9124 + }, + { + "epoch": 5.890897353131052, + "grad_norm": 1.2512502888036527, + "learning_rate": 3.9720173556220814e-07, + "loss": 0.4284, + "step": 9125 + }, + { + "epoch": 5.891542930923176, + "grad_norm": 1.5120424791201823, + "learning_rate": 3.925170755339102e-07, + "loss": 0.444, + "step": 9126 + }, + { + "epoch": 5.8921885087153, + "grad_norm": 1.538657396231969, + "learning_rate": 3.878601692470007e-07, + "loss": 0.5416, + "step": 9127 + }, + { + "epoch": 5.892834086507424, + "grad_norm": 1.435221477738227, + "learning_rate": 3.832310175653552e-07, + "loss": 0.5308, + "step": 9128 + }, + { + "epoch": 5.893479664299548, + "grad_norm": 1.477586975419481, + "learning_rate": 3.7862962134775356e-07, + "loss": 0.5736, + "step": 9129 + }, + { + "epoch": 5.894125242091672, + "grad_norm": 1.409537086898819, + "learning_rate": 3.740559814477795e-07, + "loss": 0.4968, + "step": 9130 + }, + { + "epoch": 5.894770819883796, + "grad_norm": 1.5675744046144222, + "learning_rate": 3.6951009871390437e-07, + "loss": 0.5729, + "step": 9131 + }, + { + "epoch": 5.89541639767592, + "grad_norm": 1.300292393452484, + "learning_rate": 3.649919739894369e-07, + "loss": 0.4588, + "step": 9132 + }, + { + "epoch": 5.896061975468044, + "grad_norm": 1.4716011043075339, + "learning_rate": 3.605016081125234e-07, + "loss": 0.5714, + "step": 9133 + }, + { + "epoch": 5.896707553260168, + "grad_norm": 1.3127332615939893, + "learning_rate": 3.560390019161974e-07, + "loss": 0.4353, + "step": 9134 + }, + { + "epoch": 5.8973531310522915, + "grad_norm": 1.3609116836724535, + "learning_rate": 3.5160415622829675e-07, + "loss": 0.4644, + "step": 9135 + }, + { + "epoch": 5.8979987088444155, + "grad_norm": 1.3767872572508224, + "learning_rate": 3.4719707187154666e-07, + "loss": 0.5049, + "step": 9136 + }, + { + "epoch": 
5.898644286636539, + "grad_norm": 1.49726372718819, + "learning_rate": 3.4281774966349317e-07, + "loss": 0.5468, + "step": 9137 + }, + { + "epoch": 5.899289864428663, + "grad_norm": 1.554083083526722, + "learning_rate": 3.384661904165531e-07, + "loss": 0.4659, + "step": 9138 + }, + { + "epoch": 5.899935442220787, + "grad_norm": 1.346739085239273, + "learning_rate": 3.341423949379807e-07, + "loss": 0.5201, + "step": 9139 + }, + { + "epoch": 5.900581020012911, + "grad_norm": 1.45030557744515, + "learning_rate": 3.2984636402990095e-07, + "loss": 0.4824, + "step": 9140 + }, + { + "epoch": 5.901226597805035, + "grad_norm": 1.460587383217564, + "learning_rate": 3.255780984892431e-07, + "loss": 0.5035, + "step": 9141 + }, + { + "epoch": 5.901872175597159, + "grad_norm": 1.2977137107826624, + "learning_rate": 3.213375991078404e-07, + "loss": 0.4966, + "step": 9142 + }, + { + "epoch": 5.902517753389283, + "grad_norm": 1.1512099493408718, + "learning_rate": 3.1712486667234695e-07, + "loss": 0.4253, + "step": 9143 + }, + { + "epoch": 5.903163331181407, + "grad_norm": 2.408703520653263, + "learning_rate": 3.129399019642542e-07, + "loss": 0.4432, + "step": 9144 + }, + { + "epoch": 5.903808908973531, + "grad_norm": 1.2903567222547623, + "learning_rate": 3.0878270575992456e-07, + "loss": 0.4877, + "step": 9145 + }, + { + "epoch": 5.904454486765655, + "grad_norm": 1.3192142220636678, + "learning_rate": 3.0465327883057446e-07, + "loss": 0.4616, + "step": 9146 + }, + { + "epoch": 5.90510006455778, + "grad_norm": 1.4469596758175194, + "learning_rate": 3.005516219422244e-07, + "loss": 0.5179, + "step": 9147 + }, + { + "epoch": 5.9057456423499035, + "grad_norm": 1.5122065744895437, + "learning_rate": 2.9647773585579906e-07, + "loss": 0.5311, + "step": 9148 + }, + { + "epoch": 5.906391220142027, + "grad_norm": 1.2832776036158664, + "learning_rate": 2.924316213270439e-07, + "loss": 0.4238, + "step": 9149 + }, + { + "epoch": 5.907036797934151, + "grad_norm": 1.345221012397638, + "learning_rate": 2.8841327910655855e-07, + "loss": 0.4299, + "step": 9150 + }, + { + "epoch": 5.907682375726275, + "grad_norm": 1.3733539274177606, + "learning_rate": 2.8442270993976334e-07, + "loss": 0.4831, + "step": 9151 + }, + { + "epoch": 5.908327953518399, + "grad_norm": 1.2332509152606903, + "learning_rate": 2.804599145669828e-07, + "loss": 0.4864, + "step": 9152 + }, + { + "epoch": 5.908973531310523, + "grad_norm": 1.4142667760092051, + "learning_rate": 2.7652489372332887e-07, + "loss": 0.526, + "step": 9153 + }, + { + "epoch": 5.909619109102647, + "grad_norm": 1.5024370358809254, + "learning_rate": 2.726176481388009e-07, + "loss": 0.5531, + "step": 9154 + }, + { + "epoch": 5.910264686894771, + "grad_norm": 1.2871605054847912, + "learning_rate": 2.687381785382359e-07, + "loss": 0.4826, + "step": 9155 + }, + { + "epoch": 5.910910264686895, + "grad_norm": 1.4593631598058343, + "learning_rate": 2.64886485641308e-07, + "loss": 0.5824, + "step": 9156 + }, + { + "epoch": 5.911555842479019, + "grad_norm": 1.4331991985746106, + "learning_rate": 2.610625701625624e-07, + "loss": 0.4903, + "step": 9157 + }, + { + "epoch": 5.912201420271143, + "grad_norm": 1.5790971949043155, + "learning_rate": 2.572664328113483e-07, + "loss": 0.5536, + "step": 9158 + }, + { + "epoch": 5.912846998063267, + "grad_norm": 1.3057155613934286, + "learning_rate": 2.5349807429191904e-07, + "loss": 0.4373, + "step": 9159 + }, + { + "epoch": 5.913492575855391, + "grad_norm": 1.4305273862670937, + "learning_rate": 2.4975749530331546e-07, + "loss": 0.4639, + "step": 
9160 + }, + { + "epoch": 5.914138153647515, + "grad_norm": 1.4871692718692182, + "learning_rate": 2.4604469653948247e-07, + "loss": 0.5535, + "step": 9161 + }, + { + "epoch": 5.9147837314396385, + "grad_norm": 1.4998858175163097, + "learning_rate": 2.4235967868916904e-07, + "loss": 0.488, + "step": 9162 + }, + { + "epoch": 5.9154293092317625, + "grad_norm": 1.294999907115873, + "learning_rate": 2.3870244243596183e-07, + "loss": 0.4423, + "step": 9163 + }, + { + "epoch": 5.916074887023886, + "grad_norm": 1.5085713666822065, + "learning_rate": 2.3507298845835133e-07, + "loss": 0.5576, + "step": 9164 + }, + { + "epoch": 5.91672046481601, + "grad_norm": 1.3498408815912917, + "learning_rate": 2.3147131742963232e-07, + "loss": 0.4711, + "step": 9165 + }, + { + "epoch": 5.917366042608134, + "grad_norm": 1.4613825217716838, + "learning_rate": 2.2789743001793703e-07, + "loss": 0.6299, + "step": 9166 + }, + { + "epoch": 5.918011620400258, + "grad_norm": 1.3354779481025956, + "learning_rate": 2.2435132688626845e-07, + "loss": 0.4756, + "step": 9167 + }, + { + "epoch": 5.918657198192382, + "grad_norm": 1.5576728851632764, + "learning_rate": 2.2083300869246702e-07, + "loss": 0.5724, + "step": 9168 + }, + { + "epoch": 5.919302775984506, + "grad_norm": 1.3478680803078293, + "learning_rate": 2.1734247608921062e-07, + "loss": 0.4657, + "step": 9169 + }, + { + "epoch": 5.91994835377663, + "grad_norm": 1.4081597668584473, + "learning_rate": 2.1387972972403133e-07, + "loss": 0.4706, + "step": 9170 + }, + { + "epoch": 5.920593931568754, + "grad_norm": 1.3195703648499975, + "learning_rate": 2.1044477023929863e-07, + "loss": 0.4881, + "step": 9171 + }, + { + "epoch": 5.921239509360878, + "grad_norm": 1.4564664556927378, + "learning_rate": 2.0703759827225275e-07, + "loss": 0.4546, + "step": 9172 + }, + { + "epoch": 5.921885087153002, + "grad_norm": 1.4941636481245464, + "learning_rate": 2.0365821445493813e-07, + "loss": 0.554, + "step": 9173 + }, + { + "epoch": 5.922530664945126, + "grad_norm": 1.3757261142202961, + "learning_rate": 2.003066194142866e-07, + "loss": 0.4244, + "step": 9174 + }, + { + "epoch": 5.92317624273725, + "grad_norm": 1.4812429644551925, + "learning_rate": 1.9698281377203418e-07, + "loss": 0.546, + "step": 9175 + }, + { + "epoch": 5.923821820529374, + "grad_norm": 1.3504360811515161, + "learning_rate": 1.936867981447876e-07, + "loss": 0.497, + "step": 9176 + }, + { + "epoch": 5.9244673983214975, + "grad_norm": 1.3739189573526382, + "learning_rate": 1.9041857314399112e-07, + "loss": 0.5178, + "step": 9177 + }, + { + "epoch": 5.925112976113621, + "grad_norm": 1.2118100881750777, + "learning_rate": 1.8717813937594306e-07, + "loss": 0.4023, + "step": 9178 + }, + { + "epoch": 5.925758553905745, + "grad_norm": 1.4645841924842915, + "learning_rate": 1.8396549744177924e-07, + "loss": 0.5281, + "step": 9179 + }, + { + "epoch": 5.926404131697869, + "grad_norm": 1.489182701224734, + "learning_rate": 1.8078064793745628e-07, + "loss": 0.4671, + "step": 9180 + }, + { + "epoch": 5.927049709489993, + "grad_norm": 1.3841595159936961, + "learning_rate": 1.776235914538182e-07, + "loss": 0.4552, + "step": 9181 + }, + { + "epoch": 5.927695287282117, + "grad_norm": 1.3258456282823998, + "learning_rate": 1.7449432857651325e-07, + "loss": 0.452, + "step": 9182 + }, + { + "epoch": 5.928340865074241, + "grad_norm": 1.4405972088990253, + "learning_rate": 1.7139285988606033e-07, + "loss": 0.5634, + "step": 9183 + }, + { + "epoch": 5.928986442866366, + "grad_norm": 1.6283576942996925, + "learning_rate": 
1.6831918595783256e-07, + "loss": 0.5741, + "step": 9184 + }, + { + "epoch": 5.92963202065849, + "grad_norm": 1.4836683645196427, + "learning_rate": 1.6527330736200717e-07, + "loss": 0.5295, + "step": 9185 + }, + { + "epoch": 5.930277598450614, + "grad_norm": 1.3721889050240927, + "learning_rate": 1.6225522466363217e-07, + "loss": 0.5334, + "step": 9186 + }, + { + "epoch": 5.930923176242738, + "grad_norm": 1.3254098551774816, + "learning_rate": 1.5926493842259303e-07, + "loss": 0.5411, + "step": 9187 + }, + { + "epoch": 5.931568754034862, + "grad_norm": 1.3266710686667482, + "learning_rate": 1.5630244919361268e-07, + "loss": 0.4526, + "step": 9188 + }, + { + "epoch": 5.9322143318269855, + "grad_norm": 1.4994884161536426, + "learning_rate": 1.5336775752626818e-07, + "loss": 0.5031, + "step": 9189 + }, + { + "epoch": 5.9328599096191095, + "grad_norm": 1.439180507731254, + "learning_rate": 1.504608639649907e-07, + "loss": 0.5238, + "step": 9190 + }, + { + "epoch": 5.933505487411233, + "grad_norm": 1.4569092287192382, + "learning_rate": 1.4758176904901553e-07, + "loss": 0.4727, + "step": 9191 + }, + { + "epoch": 5.934151065203357, + "grad_norm": 1.2551367781358476, + "learning_rate": 1.447304733124488e-07, + "loss": 0.4521, + "step": 9192 + }, + { + "epoch": 5.934796642995481, + "grad_norm": 1.366058369999978, + "learning_rate": 1.419069772842507e-07, + "loss": 0.5136, + "step": 9193 + }, + { + "epoch": 5.935442220787605, + "grad_norm": 1.4287394379377871, + "learning_rate": 1.3911128148820228e-07, + "loss": 0.5058, + "step": 9194 + }, + { + "epoch": 5.936087798579729, + "grad_norm": 1.4055382714059852, + "learning_rate": 1.36343386442922e-07, + "loss": 0.5691, + "step": 9195 + }, + { + "epoch": 5.936733376371853, + "grad_norm": 1.5324155001623485, + "learning_rate": 1.3360329266189906e-07, + "loss": 0.5126, + "step": 9196 + }, + { + "epoch": 5.937378954163977, + "grad_norm": 1.353730349135656, + "learning_rate": 1.3089100065342695e-07, + "loss": 0.4742, + "step": 9197 + }, + { + "epoch": 5.938024531956101, + "grad_norm": 1.3763473378542668, + "learning_rate": 1.282065109207031e-07, + "loss": 0.4923, + "step": 9198 + }, + { + "epoch": 5.938670109748225, + "grad_norm": 1.2979140133683842, + "learning_rate": 1.2554982396169588e-07, + "loss": 0.453, + "step": 9199 + }, + { + "epoch": 5.939315687540349, + "grad_norm": 1.3870188620832, + "learning_rate": 1.2292094026924436e-07, + "loss": 0.4313, + "step": 9200 + }, + { + "epoch": 5.939961265332473, + "grad_norm": 1.4797007760267844, + "learning_rate": 1.2031986033105844e-07, + "loss": 0.5475, + "step": 9201 + }, + { + "epoch": 5.940606843124597, + "grad_norm": 1.4027418155072053, + "learning_rate": 1.1774658462965215e-07, + "loss": 0.5162, + "step": 9202 + }, + { + "epoch": 5.941252420916721, + "grad_norm": 1.3932642701316278, + "learning_rate": 1.152011136423936e-07, + "loss": 0.5006, + "step": 9203 + }, + { + "epoch": 5.9418979987088445, + "grad_norm": 1.5314076163852146, + "learning_rate": 1.1268344784150508e-07, + "loss": 0.5408, + "step": 9204 + }, + { + "epoch": 5.942543576500968, + "grad_norm": 1.351411446127383, + "learning_rate": 1.1019358769402964e-07, + "loss": 0.451, + "step": 9205 + }, + { + "epoch": 5.943189154293092, + "grad_norm": 1.2722006564105155, + "learning_rate": 1.0773153366184783e-07, + "loss": 0.4616, + "step": 9206 + }, + { + "epoch": 5.943834732085216, + "grad_norm": 1.5187192416619406, + "learning_rate": 1.0529728620172762e-07, + "loss": 0.5531, + "step": 9207 + }, + { + "epoch": 5.94448030987734, + "grad_norm": 
1.4525740731268921, + "learning_rate": 1.0289084576522444e-07, + "loss": 0.5709, + "step": 9208 + }, + { + "epoch": 5.945125887669464, + "grad_norm": 1.3701471896273885, + "learning_rate": 1.005122127987812e-07, + "loss": 0.4335, + "step": 9209 + }, + { + "epoch": 5.945771465461588, + "grad_norm": 1.4658482450825663, + "learning_rate": 9.816138774362825e-08, + "loss": 0.5165, + "step": 9210 + }, + { + "epoch": 5.946417043253712, + "grad_norm": 1.3052256975444896, + "learning_rate": 9.583837103588344e-08, + "loss": 0.4607, + "step": 9211 + }, + { + "epoch": 5.947062621045836, + "grad_norm": 1.3006410229344758, + "learning_rate": 9.354316310648536e-08, + "loss": 0.4538, + "step": 9212 + }, + { + "epoch": 5.94770819883796, + "grad_norm": 1.679589284995161, + "learning_rate": 9.12757643812434e-08, + "loss": 0.582, + "step": 9213 + }, + { + "epoch": 5.948353776630084, + "grad_norm": 1.3476998020660806, + "learning_rate": 8.903617528073781e-08, + "loss": 0.4162, + "step": 9214 + }, + { + "epoch": 5.948999354422208, + "grad_norm": 1.5122110325289868, + "learning_rate": 8.68243962204862e-08, + "loss": 0.5299, + "step": 9215 + }, + { + "epoch": 5.949644932214332, + "grad_norm": 1.389552915608558, + "learning_rate": 8.46404276107604e-08, + "loss": 0.5128, + "step": 9216 + }, + { + "epoch": 5.950290510006456, + "grad_norm": 1.6482905029722525, + "learning_rate": 8.248426985671962e-08, + "loss": 0.6321, + "step": 9217 + }, + { + "epoch": 5.9509360877985795, + "grad_norm": 1.325622940688177, + "learning_rate": 8.03559233583606e-08, + "loss": 0.4918, + "step": 9218 + }, + { + "epoch": 5.9515816655907035, + "grad_norm": 1.4178909434523548, + "learning_rate": 7.825538851050084e-08, + "loss": 0.4836, + "step": 9219 + }, + { + "epoch": 5.952227243382827, + "grad_norm": 1.3315102207647023, + "learning_rate": 7.618266570282861e-08, + "loss": 0.4524, + "step": 9220 + }, + { + "epoch": 5.952872821174951, + "grad_norm": 1.306377458318701, + "learning_rate": 7.413775531983634e-08, + "loss": 0.4893, + "step": 9221 + }, + { + "epoch": 5.953518398967075, + "grad_norm": 1.4726379985599973, + "learning_rate": 7.212065774088727e-08, + "loss": 0.5406, + "step": 9222 + }, + { + "epoch": 5.954163976759199, + "grad_norm": 1.4361240455977973, + "learning_rate": 7.013137334016539e-08, + "loss": 0.4641, + "step": 9223 + }, + { + "epoch": 5.954809554551323, + "grad_norm": 1.517225570868299, + "learning_rate": 6.81699024867255e-08, + "loss": 0.5064, + "step": 9224 + }, + { + "epoch": 5.955455132343447, + "grad_norm": 1.3629839180773613, + "learning_rate": 6.623624554440987e-08, + "loss": 0.4534, + "step": 9225 + }, + { + "epoch": 5.956100710135571, + "grad_norm": 1.4124769858157307, + "learning_rate": 6.433040287194823e-08, + "loss": 0.5274, + "step": 9226 + }, + { + "epoch": 5.956746287927695, + "grad_norm": 1.343948209876184, + "learning_rate": 6.245237482290777e-08, + "loss": 0.4794, + "step": 9227 + }, + { + "epoch": 5.957391865719819, + "grad_norm": 1.4803432696128807, + "learning_rate": 6.06021617456598e-08, + "loss": 0.5722, + "step": 9228 + }, + { + "epoch": 5.958037443511943, + "grad_norm": 1.4106187076404777, + "learning_rate": 5.8779763983429764e-08, + "loss": 0.4879, + "step": 9229 + }, + { + "epoch": 5.958683021304067, + "grad_norm": 1.6202062440256646, + "learning_rate": 5.698518187433054e-08, + "loss": 0.513, + "step": 9230 + }, + { + "epoch": 5.959328599096191, + "grad_norm": 1.3916793720916485, + "learning_rate": 5.521841575124586e-08, + "loss": 0.4646, + "step": 9231 + }, + { + "epoch": 5.9599741768883145, + 
"grad_norm": 1.448267139492774, + "learning_rate": 5.34794659419302e-08, + "loss": 0.5354, + "step": 9232 + }, + { + "epoch": 5.960619754680439, + "grad_norm": 1.4100330159903052, + "learning_rate": 5.176833276899217e-08, + "loss": 0.5708, + "step": 9233 + }, + { + "epoch": 5.961265332472563, + "grad_norm": 1.6440491043431846, + "learning_rate": 5.0085016549844536e-08, + "loss": 0.6028, + "step": 9234 + }, + { + "epoch": 5.961910910264687, + "grad_norm": 1.4500907700497085, + "learning_rate": 4.8429517596787485e-08, + "loss": 0.5265, + "step": 9235 + }, + { + "epoch": 5.962556488056811, + "grad_norm": 1.4327555623275676, + "learning_rate": 4.680183621689204e-08, + "loss": 0.4755, + "step": 9236 + }, + { + "epoch": 5.963202065848935, + "grad_norm": 1.4172553525306988, + "learning_rate": 4.520197271214998e-08, + "loss": 0.5209, + "step": 9237 + }, + { + "epoch": 5.963847643641059, + "grad_norm": 1.1452398052108606, + "learning_rate": 4.362992737934057e-08, + "loss": 0.4172, + "step": 9238 + }, + { + "epoch": 5.964493221433183, + "grad_norm": 1.3439847342478086, + "learning_rate": 4.2085700510080535e-08, + "loss": 0.5098, + "step": 9239 + }, + { + "epoch": 5.965138799225307, + "grad_norm": 1.3447638506789137, + "learning_rate": 4.0569292390857376e-08, + "loss": 0.4971, + "step": 9240 + }, + { + "epoch": 5.965784377017431, + "grad_norm": 1.2609459610100824, + "learning_rate": 3.908070330297941e-08, + "loss": 0.517, + "step": 9241 + }, + { + "epoch": 5.966429954809555, + "grad_norm": 1.5172504545832544, + "learning_rate": 3.761993352257575e-08, + "loss": 0.5137, + "step": 9242 + }, + { + "epoch": 5.967075532601679, + "grad_norm": 1.4153677251998433, + "learning_rate": 3.618698332064629e-08, + "loss": 0.5247, + "step": 9243 + }, + { + "epoch": 5.967721110393803, + "grad_norm": 1.4932565483526092, + "learning_rate": 3.478185296302838e-08, + "loss": 0.5543, + "step": 9244 + }, + { + "epoch": 5.9683666881859265, + "grad_norm": 1.7394870762494212, + "learning_rate": 3.3404542710380176e-08, + "loss": 0.4492, + "step": 9245 + }, + { + "epoch": 5.9690122659780505, + "grad_norm": 1.376480720401074, + "learning_rate": 3.205505281821397e-08, + "loss": 0.4519, + "step": 9246 + }, + { + "epoch": 5.969657843770174, + "grad_norm": 1.4763586408529707, + "learning_rate": 3.073338353686283e-08, + "loss": 0.5155, + "step": 9247 + }, + { + "epoch": 5.970303421562298, + "grad_norm": 1.4106018064079386, + "learning_rate": 2.9439535111497324e-08, + "loss": 0.4585, + "step": 9248 + }, + { + "epoch": 5.970948999354422, + "grad_norm": 1.3959452808684845, + "learning_rate": 2.817350778217542e-08, + "loss": 0.4894, + "step": 9249 + }, + { + "epoch": 5.971594577146546, + "grad_norm": 1.3298601587125802, + "learning_rate": 2.6935301783742592e-08, + "loss": 0.498, + "step": 9250 + }, + { + "epoch": 5.97224015493867, + "grad_norm": 1.287349579865924, + "learning_rate": 2.572491734588178e-08, + "loss": 0.4562, + "step": 9251 + }, + { + "epoch": 5.972885732730794, + "grad_norm": 1.3277956635760098, + "learning_rate": 2.4542354693146694e-08, + "loss": 0.4134, + "step": 9252 + }, + { + "epoch": 5.973531310522918, + "grad_norm": 1.5148744499463942, + "learning_rate": 2.3387614044928504e-08, + "loss": 0.5107, + "step": 9253 + }, + { + "epoch": 5.974176888315042, + "grad_norm": 1.220065321893271, + "learning_rate": 2.226069561542254e-08, + "loss": 0.4202, + "step": 9254 + }, + { + "epoch": 5.974822466107166, + "grad_norm": 1.3243615337465786, + "learning_rate": 2.1161599613678248e-08, + "loss": 0.4833, + "step": 9255 + }, + { + 
"epoch": 5.97546804389929, + "grad_norm": 1.378195290859322, + "learning_rate": 2.009032624361584e-08, + "loss": 0.3977, + "step": 9256 + }, + { + "epoch": 5.976113621691414, + "grad_norm": 1.30840942731629, + "learning_rate": 1.9046875703959684e-08, + "loss": 0.4383, + "step": 9257 + }, + { + "epoch": 5.976759199483538, + "grad_norm": 1.3091454682110546, + "learning_rate": 1.8031248188254966e-08, + "loss": 0.4713, + "step": 9258 + }, + { + "epoch": 5.9774047772756616, + "grad_norm": 1.4954959608129186, + "learning_rate": 1.7043443884950934e-08, + "loss": 0.5476, + "step": 9259 + }, + { + "epoch": 5.9780503550677855, + "grad_norm": 1.3960088768315675, + "learning_rate": 1.608346297725105e-08, + "loss": 0.479, + "step": 9260 + }, + { + "epoch": 5.978695932859909, + "grad_norm": 1.6046530294345756, + "learning_rate": 1.515130564329614e-08, + "loss": 0.4959, + "step": 9261 + }, + { + "epoch": 5.979341510652033, + "grad_norm": 1.2652634427621907, + "learning_rate": 1.424697205596459e-08, + "loss": 0.4436, + "step": 9262 + }, + { + "epoch": 5.979987088444157, + "grad_norm": 1.239561643885119, + "learning_rate": 1.3370462383038849e-08, + "loss": 0.4489, + "step": 9263 + }, + { + "epoch": 5.980632666236281, + "grad_norm": 1.2596873742920922, + "learning_rate": 1.2521776787105531e-08, + "loss": 0.4388, + "step": 9264 + }, + { + "epoch": 5.981278244028405, + "grad_norm": 1.4317367431659418, + "learning_rate": 1.1700915425638668e-08, + "loss": 0.5014, + "step": 9265 + }, + { + "epoch": 5.981923821820529, + "grad_norm": 1.252001733833299, + "learning_rate": 1.0907878450883145e-08, + "loss": 0.4476, + "step": 9266 + }, + { + "epoch": 5.982569399612653, + "grad_norm": 1.3394869650897652, + "learning_rate": 1.0142666009954614e-08, + "loss": 0.4617, + "step": 9267 + }, + { + "epoch": 5.983214977404777, + "grad_norm": 1.1821333237766087, + "learning_rate": 9.405278244839499e-09, + "loss": 0.4593, + "step": 9268 + }, + { + "epoch": 5.983860555196901, + "grad_norm": 1.326136308248476, + "learning_rate": 8.695715292311723e-09, + "loss": 0.4857, + "step": 9269 + }, + { + "epoch": 5.984506132989026, + "grad_norm": 1.5123309204527815, + "learning_rate": 8.013977283999328e-09, + "loss": 0.5199, + "step": 9270 + }, + { + "epoch": 5.98515171078115, + "grad_norm": 1.3295698103744953, + "learning_rate": 7.360064346367822e-09, + "loss": 0.4312, + "step": 9271 + }, + { + "epoch": 5.9857972885732735, + "grad_norm": 1.5335453150097422, + "learning_rate": 6.733976600736824e-09, + "loss": 0.4952, + "step": 9272 + }, + { + "epoch": 5.9864428663653975, + "grad_norm": 1.3815609626132228, + "learning_rate": 6.135714163246763e-09, + "loss": 0.4096, + "step": 9273 + }, + { + "epoch": 5.987088444157521, + "grad_norm": 1.4414474475565027, + "learning_rate": 5.565277144892188e-09, + "loss": 0.4603, + "step": 9274 + }, + { + "epoch": 5.987734021949645, + "grad_norm": 1.366890807382713, + "learning_rate": 5.022665651471802e-09, + "loss": 0.4793, + "step": 9275 + }, + { + "epoch": 5.988379599741769, + "grad_norm": 1.4261504706405992, + "learning_rate": 4.5078797836717305e-09, + "loss": 0.553, + "step": 9276 + }, + { + "epoch": 5.989025177533893, + "grad_norm": 1.4729169743485517, + "learning_rate": 4.020919636965602e-09, + "loss": 0.5247, + "step": 9277 + }, + { + "epoch": 5.989670755326017, + "grad_norm": 1.4590547587183684, + "learning_rate": 3.561785301697817e-09, + "loss": 0.5051, + "step": 9278 + }, + { + "epoch": 5.990316333118141, + "grad_norm": 1.7274875501860705, + "learning_rate": 3.1304768630668885e-09, + "loss": 0.4656, + 
"step": 9279 + }, + { + "epoch": 5.990961910910265, + "grad_norm": 2.5854445034784033, + "learning_rate": 2.726994401042182e-09, + "loss": 0.5292, + "step": 9280 + }, + { + "epoch": 5.991607488702389, + "grad_norm": 1.4399738494472396, + "learning_rate": 2.3513379905137906e-09, + "loss": 0.4663, + "step": 9281 + }, + { + "epoch": 5.992253066494513, + "grad_norm": 1.3012921586278092, + "learning_rate": 2.0035077011426594e-09, + "loss": 0.44, + "step": 9282 + }, + { + "epoch": 5.992898644286637, + "grad_norm": 1.3398319192184556, + "learning_rate": 1.6835035974605004e-09, + "loss": 0.4686, + "step": 9283 + }, + { + "epoch": 5.993544222078761, + "grad_norm": 1.2713034870322968, + "learning_rate": 1.3913257388531441e-09, + "loss": 0.4926, + "step": 9284 + }, + { + "epoch": 5.994189799870885, + "grad_norm": 1.3506992953337171, + "learning_rate": 1.126974179477269e-09, + "loss": 0.4651, + "step": 9285 + }, + { + "epoch": 5.994835377663009, + "grad_norm": 1.289879926160096, + "learning_rate": 8.904489684269378e-10, + "loss": 0.4478, + "step": 9286 + }, + { + "epoch": 5.9954809554551325, + "grad_norm": 1.1833908025602833, + "learning_rate": 6.817501495337552e-10, + "loss": 0.4308, + "step": 9287 + }, + { + "epoch": 5.996126533247256, + "grad_norm": 1.323269483196461, + "learning_rate": 5.008777615500559e-10, + "loss": 0.4769, + "step": 9288 + }, + { + "epoch": 5.99677211103938, + "grad_norm": 1.3431177320767005, + "learning_rate": 3.4783183799902415e-10, + "loss": 0.4311, + "step": 9289 + }, + { + "epoch": 5.997417688831504, + "grad_norm": 1.3007543143827616, + "learning_rate": 2.2261240729126717e-10, + "loss": 0.4627, + "step": 9290 + }, + { + "epoch": 5.998063266623628, + "grad_norm": 1.5810011012524356, + "learning_rate": 1.2521949265820175e-10, + "loss": 0.569, + "step": 9291 + }, + { + "epoch": 5.998708844415752, + "grad_norm": 1.3188130357230734, + "learning_rate": 5.5653112152054256e-11, + "loss": 0.4003, + "step": 9292 + }, + { + "epoch": 5.999354422207876, + "grad_norm": 1.368931728289847, + "learning_rate": 1.391327867916736e-11, + "loss": 0.5071, + "step": 9293 + }, + { + "epoch": 6.0, + "grad_norm": 1.3957592302437685, + "learning_rate": 0.0, + "loss": 0.5055, + "step": 9294 + }, + { + "epoch": 6.0, + "eval_loss": 2.6247663497924805, + "eval_runtime": 58.358, + "eval_samples_per_second": 5.946, + "eval_steps_per_second": 5.946, + "step": 9294 + }, + { + "epoch": 6.0, + "step": 9294, + "total_flos": 51748420976640.0, + "train_loss": 1.7242380489751759, + "train_runtime": 26284.2052, + "train_samples_per_second": 0.354, + "train_steps_per_second": 0.354 + } + ], + "logging_steps": 1.0, + "max_steps": 9294, + "num_input_tokens_seen": 0, + "num_train_epochs": 6, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 51748420976640.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}