{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1374,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002186987424822307,
      "grad_norm": 0.019816486164927483,
      "learning_rate": 0.0,
      "loss": 0.6513,
      "step": 1
    },
    {
      "epoch": 0.004373974849644614,
      "grad_norm": 0.02367558144032955,
      "learning_rate": 2.1739130434782607e-06,
      "loss": 0.6461,
      "step": 2
    },
    {
      "epoch": 0.0065609622744669215,
      "grad_norm": 0.016746528446674347,
      "learning_rate": 4.347826086956521e-06,
      "loss": 0.5865,
      "step": 3
    },
    {
      "epoch": 0.008747949699289229,
      "grad_norm": 0.028324777260422707,
      "learning_rate": 6.521739130434782e-06,
      "loss": 0.4125,
      "step": 4
    },
    {
      "epoch": 0.010934937124111536,
      "grad_norm": 0.024395287036895752,
      "learning_rate": 8.695652173913043e-06,
      "loss": 0.6942,
      "step": 5
    },
    {
      "epoch": 0.013121924548933843,
      "grad_norm": 0.021644193679094315,
      "learning_rate": 1.0869565217391303e-05,
      "loss": 0.9391,
      "step": 6
    },
    {
      "epoch": 0.01530891197375615,
      "grad_norm": 0.04051661118865013,
      "learning_rate": 1.3043478260869564e-05,
      "loss": 0.6903,
      "step": 7
    },
    {
      "epoch": 0.017495899398578457,
      "grad_norm": 0.02317817509174347,
      "learning_rate": 1.5217391304347826e-05,
      "loss": 0.7229,
      "step": 8
    },
    {
      "epoch": 0.019682886823400764,
      "grad_norm": 0.016089174896478653,
      "learning_rate": 1.7391304347826085e-05,
      "loss": 0.9602,
      "step": 9
    },
    {
      "epoch": 0.02186987424822307,
      "grad_norm": 0.029251758009195328,
      "learning_rate": 1.9565217391304346e-05,
      "loss": 0.9696,
      "step": 10
    },
    {
      "epoch": 0.02405686167304538,
      "grad_norm": 0.02963915839791298,
      "learning_rate": 2.1739130434782607e-05,
      "loss": 0.8149,
      "step": 11
    },
    {
      "epoch": 0.026243849097867686,
      "grad_norm": 0.02249906025826931,
      "learning_rate": 2.3913043478260864e-05,
      "loss": 0.8597,
      "step": 12
    },
    {
      "epoch": 0.028430836522689993,
      "grad_norm": 0.038812343031167984,
      "learning_rate": 2.6086956521739128e-05,
      "loss": 0.6965,
      "step": 13
    },
    {
      "epoch": 0.0306178239475123,
      "grad_norm": 0.1395931988954544,
      "learning_rate": 2.826086956521739e-05,
      "loss": 0.7026,
      "step": 14
    },
    {
      "epoch": 0.03280481137233461,
      "grad_norm": 0.04644119739532471,
      "learning_rate": 3.0434782608695653e-05,
      "loss": 0.5772,
      "step": 15
    },
    {
      "epoch": 0.034991798797156914,
      "grad_norm": 0.038876552134752274,
      "learning_rate": 3.260869565217391e-05,
      "loss": 0.6508,
      "step": 16
    },
    {
      "epoch": 0.037178786221979225,
      "grad_norm": 0.031183617189526558,
      "learning_rate": 3.478260869565217e-05,
      "loss": 0.7206,
      "step": 17
    },
    {
      "epoch": 0.03936577364680153,
      "grad_norm": 0.02312302775681019,
      "learning_rate": 3.695652173913043e-05,
      "loss": 0.6714,
      "step": 18
    },
    {
      "epoch": 0.04155276107162384,
      "grad_norm": 0.022964971140027046,
      "learning_rate": 3.913043478260869e-05,
      "loss": 1.0436,
      "step": 19
    },
    {
      "epoch": 0.04373974849644614,
      "grad_norm": 0.0303932037204504,
      "learning_rate": 4.130434782608695e-05,
      "loss": 0.7183,
      "step": 20
    },
    {
      "epoch": 0.045926735921268454,
      "grad_norm": 0.04829512909054756,
      "learning_rate": 4.3478260869565214e-05,
      "loss": 0.8273,
      "step": 21
    },
    {
      "epoch": 0.04811372334609076,
      "grad_norm": 0.03576242923736572,
      "learning_rate": 4.5652173913043474e-05,
      "loss": 0.7451,
      "step": 22
    },
    {
      "epoch": 0.05030071077091307,
      "grad_norm": 0.0263582281768322,
      "learning_rate": 4.782608695652173e-05,
      "loss": 0.7812,
      "step": 23
    },
    {
      "epoch": 0.05248769819573537,
      "grad_norm": 0.022913772612810135,
      "learning_rate": 4.9999999999999996e-05,
      "loss": 0.9077,
      "step": 24
    },
    {
      "epoch": 0.05467468562055768,
      "grad_norm": 0.026295918971300125,
      "learning_rate": 5.2173913043478256e-05,
      "loss": 0.5756,
      "step": 25
    },
    {
      "epoch": 0.056861673045379986,
      "grad_norm": 0.05335557088255882,
      "learning_rate": 5.434782608695652e-05,
      "loss": 0.6425,
      "step": 26
    },
    {
      "epoch": 0.0590486604702023,
      "grad_norm": 0.03465011343359947,
      "learning_rate": 5.652173913043478e-05,
      "loss": 1.1668,
      "step": 27
    },
    {
      "epoch": 0.0612356478950246,
      "grad_norm": 0.028022659942507744,
      "learning_rate": 5.869565217391304e-05,
      "loss": 0.661,
      "step": 28
    },
    {
      "epoch": 0.0634226353198469,
      "grad_norm": 0.0726475864648819,
      "learning_rate": 6.0869565217391306e-05,
      "loss": 0.6718,
      "step": 29
    },
    {
      "epoch": 0.06560962274466922,
      "grad_norm": 0.03730878606438637,
      "learning_rate": 6.304347826086956e-05,
      "loss": 0.743,
      "step": 30
    },
    {
      "epoch": 0.06779661016949153,
      "grad_norm": 0.08332774043083191,
      "learning_rate": 6.521739130434782e-05,
      "loss": 0.9742,
      "step": 31
    },
    {
      "epoch": 0.06998359759431383,
      "grad_norm": 0.05056551471352577,
      "learning_rate": 6.739130434782608e-05,
      "loss": 1.0044,
      "step": 32
    },
    {
      "epoch": 0.07217058501913615,
      "grad_norm": 0.04123561084270477,
      "learning_rate": 6.956521739130434e-05,
      "loss": 0.7357,
      "step": 33
    },
    {
      "epoch": 0.07435757244395845,
      "grad_norm": 0.03361466899514198,
      "learning_rate": 7.17391304347826e-05,
      "loss": 0.9187,
      "step": 34
    },
    {
      "epoch": 0.07654455986878075,
      "grad_norm": 0.07812534272670746,
      "learning_rate": 7.391304347826086e-05,
      "loss": 0.6135,
      "step": 35
    },
    {
      "epoch": 0.07873154729360306,
      "grad_norm": 0.036363691091537476,
      "learning_rate": 7.608695652173912e-05,
      "loss": 0.6875,
      "step": 36
    },
    {
      "epoch": 0.08091853471842538,
      "grad_norm": 0.07220917195081711,
      "learning_rate": 7.826086956521738e-05,
      "loss": 0.8439,
      "step": 37
    },
    {
      "epoch": 0.08310552214324768,
      "grad_norm": 0.08875200897455215,
      "learning_rate": 8.043478260869566e-05,
      "loss": 0.838,
      "step": 38
    },
    {
      "epoch": 0.08529250956806998,
      "grad_norm": 0.0466715507209301,
      "learning_rate": 8.26086956521739e-05,
      "loss": 0.7609,
      "step": 39
    },
    {
      "epoch": 0.08747949699289229,
      "grad_norm": 0.06119077280163765,
      "learning_rate": 8.478260869565217e-05,
      "loss": 0.5831,
      "step": 40
    },
    {
      "epoch": 0.0896664844177146,
      "grad_norm": 0.033799510449171066,
      "learning_rate": 8.695652173913043e-05,
      "loss": 0.7551,
      "step": 41
    },
    {
      "epoch": 0.09185347184253691,
      "grad_norm": 0.0656280517578125,
      "learning_rate": 8.913043478260869e-05,
      "loss": 0.7214,
      "step": 42
    },
    {
      "epoch": 0.09404045926735921,
      "grad_norm": 0.07249666750431061,
      "learning_rate": 9.130434782608695e-05,
      "loss": 0.7571,
      "step": 43
    },
    {
      "epoch": 0.09622744669218151,
      "grad_norm": 0.05747272074222565,
      "learning_rate": 9.347826086956521e-05,
      "loss": 0.7411,
      "step": 44
    },
    {
      "epoch": 0.09841443411700383,
      "grad_norm": 0.06284407526254654,
      "learning_rate": 9.565217391304346e-05,
      "loss": 0.6705,
      "step": 45
    },
    {
      "epoch": 0.10060142154182614,
      "grad_norm": 0.08808895945549011,
      "learning_rate": 9.782608695652173e-05,
      "loss": 0.7999,
      "step": 46
    },
    {
      "epoch": 0.10278840896664844,
      "grad_norm": 0.04671047255396843,
      "learning_rate": 9.999999999999999e-05,
      "loss": 0.7769,
      "step": 47
    },
    {
      "epoch": 0.10497539639147074,
      "grad_norm": 0.04966476559638977,
      "learning_rate": 0.00010217391304347825,
      "loss": 0.7384,
      "step": 48
    },
    {
      "epoch": 0.10716238381629306,
      "grad_norm": 0.03611300513148308,
      "learning_rate": 0.00010434782608695651,
      "loss": 1.3535,
      "step": 49
    },
    {
      "epoch": 0.10934937124111536,
      "grad_norm": 0.07358794659376144,
      "learning_rate": 0.00010652173913043477,
      "loss": 0.636,
      "step": 50
    },
    {
      "epoch": 0.11153635866593767,
      "grad_norm": 0.0816405862569809,
      "learning_rate": 0.00010869565217391303,
      "loss": 0.9147,
      "step": 51
    },
    {
      "epoch": 0.11372334609075997,
      "grad_norm": 0.027666306123137474,
      "learning_rate": 0.00011086956521739128,
      "loss": 0.8123,
      "step": 52
    },
    {
      "epoch": 0.11591033351558229,
      "grad_norm": 0.05469057708978653,
      "learning_rate": 0.00011304347826086956,
      "loss": 0.5093,
      "step": 53
    },
    {
      "epoch": 0.1180973209404046,
      "grad_norm": 0.06001636013388634,
      "learning_rate": 0.00011521739130434782,
      "loss": 0.5906,
      "step": 54
    },
    {
      "epoch": 0.1202843083652269,
      "grad_norm": 0.11656013131141663,
      "learning_rate": 0.00011739130434782608,
      "loss": 0.7212,
      "step": 55
    },
    {
      "epoch": 0.1224712957900492,
      "grad_norm": 0.05737491697072983,
      "learning_rate": 0.00011956521739130434,
      "loss": 0.9612,
      "step": 56
    },
    {
      "epoch": 0.12465828321487152,
      "grad_norm": 0.08259277790784836,
      "learning_rate": 0.00012173913043478261,
      "loss": 0.607,
      "step": 57
    },
    {
      "epoch": 0.1268452706396938,
      "grad_norm": 0.08760760724544525,
      "learning_rate": 0.00012391304347826086,
      "loss": 0.7577,
      "step": 58
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 0.0668894574046135,
      "learning_rate": 0.00012608695652173912,
      "loss": 0.6312,
      "step": 59
    },
    {
      "epoch": 0.13121924548933844,
      "grad_norm": 0.11188331991434097,
      "learning_rate": 0.00012826086956521738,
      "loss": 0.7738,
      "step": 60
    },
    {
      "epoch": 0.13340623291416073,
      "grad_norm": 0.1832694411277771,
      "learning_rate": 0.00013043478260869564,
      "loss": 0.752,
      "step": 61
    },
    {
      "epoch": 0.13559322033898305,
      "grad_norm": 0.08274513483047485,
      "learning_rate": 0.0001326086956521739,
      "loss": 0.4737,
      "step": 62
    },
    {
      "epoch": 0.13778020776380537,
      "grad_norm": 0.12250404059886932,
      "learning_rate": 0.00013478260869565216,
      "loss": 1.1355,
      "step": 63
    },
    {
      "epoch": 0.13996719518862766,
      "grad_norm": 0.09029451757669449,
      "learning_rate": 0.00013695652173913042,
      "loss": 0.829,
      "step": 64
    },
    {
      "epoch": 0.14215418261344998,
      "grad_norm": 0.07868961989879608,
      "learning_rate": 0.00013913043478260868,
      "loss": 0.6539,
      "step": 65
    },
    {
      "epoch": 0.1443411700382723,
      "grad_norm": 0.05902542173862457,
      "learning_rate": 0.00014130434782608694,
      "loss": 0.7102,
      "step": 66
    },
    {
      "epoch": 0.14652815746309458,
      "grad_norm": 0.056226056069135666,
      "learning_rate": 0.0001434782608695652,
      "loss": 0.4959,
      "step": 67
    },
    {
      "epoch": 0.1487151448879169,
      "grad_norm": 0.0737537145614624,
      "learning_rate": 0.00014565217391304347,
      "loss": 0.6453,
      "step": 68
    },
    {
      "epoch": 0.1509021323127392,
      "grad_norm": 0.06562914699316025,
      "learning_rate": 0.00014782608695652173,
      "loss": 0.5184,
      "step": 69
    },
    {
      "epoch": 0.1530891197375615,
      "grad_norm": 0.047399215400218964,
      "learning_rate": 0.00015,
      "loss": 0.7722,
      "step": 70
    },
    {
      "epoch": 0.15527610716238383,
      "grad_norm": 0.06085558980703354,
      "learning_rate": 0.00015217391304347825,
      "loss": 0.5718,
      "step": 71
    },
    {
      "epoch": 0.15746309458720611,
      "grad_norm": 0.08985075354576111,
      "learning_rate": 0.00015434782608695648,
      "loss": 0.7404,
      "step": 72
    },
    {
      "epoch": 0.15965008201202843,
      "grad_norm": 0.07962161302566528,
      "learning_rate": 0.00015652173913043477,
      "loss": 0.5717,
      "step": 73
    },
    {
      "epoch": 0.16183706943685075,
      "grad_norm": 0.10055044293403625,
      "learning_rate": 0.00015869565217391303,
      "loss": 0.7359,
      "step": 74
    },
    {
      "epoch": 0.16402405686167304,
      "grad_norm": 0.0740840807557106,
      "learning_rate": 0.00016086956521739132,
      "loss": 0.5564,
      "step": 75
    },
    {
      "epoch": 0.16621104428649536,
      "grad_norm": 0.10917298495769501,
      "learning_rate": 0.00016304347826086955,
      "loss": 1.0051,
      "step": 76
    },
    {
      "epoch": 0.16839803171131765,
      "grad_norm": 0.09235462546348572,
      "learning_rate": 0.0001652173913043478,
      "loss": 0.6685,
      "step": 77
    },
    {
      "epoch": 0.17058501913613996,
      "grad_norm": 0.09104294329881668,
      "learning_rate": 0.00016739130434782607,
      "loss": 0.7589,
      "step": 78
    },
    {
      "epoch": 0.17277200656096228,
      "grad_norm": 0.06644225120544434,
      "learning_rate": 0.00016956521739130433,
      "loss": 0.7341,
      "step": 79
    },
    {
      "epoch": 0.17495899398578457,
      "grad_norm": 0.1140175461769104,
      "learning_rate": 0.0001717391304347826,
      "loss": 0.4718,
      "step": 80
    },
    {
      "epoch": 0.1771459814106069,
      "grad_norm": 0.08415459096431732,
      "learning_rate": 0.00017391304347826085,
      "loss": 0.5123,
      "step": 81
    },
    {
      "epoch": 0.1793329688354292,
      "grad_norm": 0.09409467875957489,
      "learning_rate": 0.00017608695652173914,
      "loss": 0.8004,
      "step": 82
    },
    {
      "epoch": 0.1815199562602515,
      "grad_norm": 0.06406699866056442,
      "learning_rate": 0.00017826086956521738,
      "loss": 0.7602,
      "step": 83
    },
    {
      "epoch": 0.18370694368507381,
      "grad_norm": 0.08316787332296371,
      "learning_rate": 0.00018043478260869564,
      "loss": 0.5476,
      "step": 84
    },
    {
      "epoch": 0.1858939311098961,
      "grad_norm": 0.12399126589298248,
      "learning_rate": 0.0001826086956521739,
      "loss": 0.9202,
      "step": 85
    },
    {
      "epoch": 0.18808091853471842,
      "grad_norm": 0.055993784219026566,
      "learning_rate": 0.00018478260869565216,
      "loss": 0.7112,
      "step": 86
    },
    {
      "epoch": 0.19026790595954074,
      "grad_norm": 0.05300869792699814,
      "learning_rate": 0.00018695652173913042,
      "loss": 0.5599,
      "step": 87
    },
    {
      "epoch": 0.19245489338436303,
      "grad_norm": 0.0660649836063385,
      "learning_rate": 0.00018913043478260868,
      "loss": 0.6456,
      "step": 88
    },
    {
      "epoch": 0.19464188080918535,
      "grad_norm": 0.0759139358997345,
      "learning_rate": 0.0001913043478260869,
      "loss": 0.5772,
      "step": 89
    },
    {
      "epoch": 0.19682886823400766,
      "grad_norm": 0.08481380343437195,
      "learning_rate": 0.0001934782608695652,
      "loss": 0.5918,
      "step": 90
    },
    {
      "epoch": 0.19901585565882995,
      "grad_norm": 0.04496153071522713,
      "learning_rate": 0.00019565217391304346,
      "loss": 0.5967,
      "step": 91
    },
    {
      "epoch": 0.20120284308365227,
      "grad_norm": 0.06545284390449524,
      "learning_rate": 0.00019782608695652172,
      "loss": 0.7882,
      "step": 92
    },
    {
      "epoch": 0.2033898305084746,
      "grad_norm": 0.0938073992729187,
      "learning_rate": 0.00019999999999999998,
      "loss": 0.5598,
      "step": 93
    },
    {
      "epoch": 0.20557681793329688,
      "grad_norm": 0.08632558584213257,
      "learning_rate": 0.00020217391304347824,
      "loss": 0.687,
      "step": 94
    },
    {
      "epoch": 0.2077638053581192,
      "grad_norm": 0.053765781223773956,
      "learning_rate": 0.0002043478260869565,
      "loss": 0.6326,
      "step": 95
    },
    {
      "epoch": 0.2099507927829415,
      "grad_norm": 0.3586576282978058,
      "learning_rate": 0.00020652173913043474,
      "loss": 0.6559,
      "step": 96
    },
    {
      "epoch": 0.2121377802077638,
      "grad_norm": 0.10852506011724472,
      "learning_rate": 0.00020869565217391303,
      "loss": 0.5066,
      "step": 97
    },
    {
      "epoch": 0.21432476763258612,
      "grad_norm": 0.09538215398788452,
      "learning_rate": 0.00021086956521739129,
      "loss": 0.6338,
      "step": 98
    },
    {
      "epoch": 0.2165117550574084,
      "grad_norm": 0.078074149787426,
      "learning_rate": 0.00021304347826086955,
      "loss": 0.7748,
      "step": 99
    },
    {
      "epoch": 0.21869874248223073,
      "grad_norm": 0.07761886715888977,
      "learning_rate": 0.0002152173913043478,
      "loss": 0.6391,
      "step": 100
    },
    {
      "epoch": 0.22088572990705305,
      "grad_norm": 0.08739285171031952,
      "learning_rate": 0.00021739130434782607,
      "loss": 0.621,
      "step": 101
    },
    {
      "epoch": 0.22307271733187534,
      "grad_norm": 0.10503433644771576,
      "learning_rate": 0.00021956521739130433,
      "loss": 0.7355,
      "step": 102
    },
    {
      "epoch": 0.22525970475669765,
      "grad_norm": 0.08183517307043076,
      "learning_rate": 0.00022173913043478256,
      "loss": 0.9783,
      "step": 103
    },
    {
      "epoch": 0.22744669218151994,
      "grad_norm": 0.1349480152130127,
      "learning_rate": 0.00022391304347826085,
      "loss": 0.7544,
      "step": 104
    },
    {
      "epoch": 0.22963367960634226,
      "grad_norm": 0.07376862317323685,
      "learning_rate": 0.0002260869565217391,
      "loss": 0.5361,
      "step": 105
    },
    {
      "epoch": 0.23182066703116458,
      "grad_norm": 0.04610562324523926,
      "learning_rate": 0.0002282608695652174,
      "loss": 0.4957,
      "step": 106
    },
    {
      "epoch": 0.23400765445598687,
      "grad_norm": 0.045114483684301376,
      "learning_rate": 0.00023043478260869563,
      "loss": 0.6601,
      "step": 107
    },
    {
      "epoch": 0.2361946418808092,
      "grad_norm": 0.06456376612186432,
      "learning_rate": 0.0002326086956521739,
      "loss": 0.6509,
      "step": 108
    },
    {
      "epoch": 0.2383816293056315,
      "grad_norm": 0.10435660928487778,
      "learning_rate": 0.00023478260869565215,
      "loss": 0.7368,
      "step": 109
    },
    {
      "epoch": 0.2405686167304538,
      "grad_norm": 0.08688430488109589,
      "learning_rate": 0.00023695652173913041,
      "loss": 0.6227,
      "step": 110
    },
    {
      "epoch": 0.2427556041552761,
      "grad_norm": 0.060819629579782486,
      "learning_rate": 0.00023913043478260867,
      "loss": 0.7475,
      "step": 111
    },
    {
      "epoch": 0.2449425915800984,
      "grad_norm": 0.05878061801195145,
      "learning_rate": 0.00024130434782608694,
      "loss": 0.5565,
      "step": 112
    },
    {
      "epoch": 0.24712957900492072,
      "grad_norm": 0.07892751693725586,
      "learning_rate": 0.00024347826086956522,
      "loss": 0.6662,
      "step": 113
    },
    {
      "epoch": 0.24931656642974304,
      "grad_norm": 0.07645498961210251,
      "learning_rate": 0.00024565217391304343,
      "loss": 0.5861,
      "step": 114
    },
    {
      "epoch": 0.25150355385456535,
      "grad_norm": 0.07400215417146683,
      "learning_rate": 0.0002478260869565217,
      "loss": 0.701,
      "step": 115
    },
    {
      "epoch": 0.2536905412793876,
      "grad_norm": 0.06814702600240707,
      "learning_rate": 0.00025,
      "loss": 0.7172,
      "step": 116
    },
    {
      "epoch": 0.25587752870420993,
      "grad_norm": 0.14431889355182648,
      "learning_rate": 0.00025217391304347824,
      "loss": 0.6619,
      "step": 117
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 0.07565838098526001,
      "learning_rate": 0.00025434782608695647,
      "loss": 0.6557,
      "step": 118
    },
    {
      "epoch": 0.26025150355385457,
      "grad_norm": 0.20756904780864716,
      "learning_rate": 0.00025652173913043476,
      "loss": 0.754,
      "step": 119
    },
    {
      "epoch": 0.2624384909786769,
      "grad_norm": 0.05772192403674126,
      "learning_rate": 0.00025869565217391305,
      "loss": 0.7944,
      "step": 120
    },
    {
      "epoch": 0.2646254784034992,
      "grad_norm": 0.0794505923986435,
      "learning_rate": 0.0002608695652173913,
      "loss": 0.6919,
      "step": 121
    },
    {
      "epoch": 0.26681246582832147,
      "grad_norm": 0.13767485320568085,
      "learning_rate": 0.00026304347826086957,
      "loss": 0.6862,
      "step": 122
    },
    {
      "epoch": 0.2689994532531438,
      "grad_norm": 0.06422579288482666,
      "learning_rate": 0.0002652173913043478,
      "loss": 0.6693,
      "step": 123
    },
    {
      "epoch": 0.2711864406779661,
      "grad_norm": 0.35600975155830383,
      "learning_rate": 0.00026739130434782604,
      "loss": 0.7302,
      "step": 124
    },
    {
      "epoch": 0.2733734281027884,
      "grad_norm": 0.07260554283857346,
      "learning_rate": 0.0002695652173913043,
      "loss": 0.7832,
      "step": 125
    },
    {
      "epoch": 0.27556041552761074,
      "grad_norm": 0.06728994846343994,
      "learning_rate": 0.0002717391304347826,
      "loss": 0.7292,
      "step": 126
    },
    {
      "epoch": 0.277747402952433,
      "grad_norm": 0.06649418920278549,
      "learning_rate": 0.00027391304347826085,
      "loss": 0.5997,
      "step": 127
    },
    {
      "epoch": 0.2799343903772553,
      "grad_norm": 0.04817730188369751,
      "learning_rate": 0.0002760869565217391,
      "loss": 0.4626,
      "step": 128
    },
    {
      "epoch": 0.28212137780207763,
      "grad_norm": 0.08356320858001709,
      "learning_rate": 0.00027826086956521737,
      "loss": 0.6679,
      "step": 129
    },
    {
      "epoch": 0.28430836522689995,
      "grad_norm": 0.06807713955640793,
      "learning_rate": 0.00028043478260869565,
      "loss": 0.9006,
      "step": 130
    },
    {
      "epoch": 0.28649535265172227,
      "grad_norm": 0.04999319463968277,
      "learning_rate": 0.0002826086956521739,
      "loss": 0.5937,
      "step": 131
    },
    {
      "epoch": 0.2886823400765446,
      "grad_norm": 0.04334180802106857,
      "learning_rate": 0.0002847826086956521,
      "loss": 0.6044,
      "step": 132
    },
    {
      "epoch": 0.29086932750136685,
      "grad_norm": 0.06852805614471436,
      "learning_rate": 0.0002869565217391304,
      "loss": 0.683,
      "step": 133
    },
    {
      "epoch": 0.29305631492618917,
      "grad_norm": 0.12273514270782471,
      "learning_rate": 0.00028913043478260864,
      "loss": 0.665,
      "step": 134
    },
    {
      "epoch": 0.2952433023510115,
      "grad_norm": 0.10215126723051071,
      "learning_rate": 0.00029130434782608693,
      "loss": 0.6439,
      "step": 135
    },
    {
      "epoch": 0.2974302897758338,
      "grad_norm": 0.07202117145061493,
      "learning_rate": 0.0002934782608695652,
      "loss": 0.7748,
      "step": 136
    },
    {
      "epoch": 0.2996172772006561,
      "grad_norm": 0.14298571646213531,
      "learning_rate": 0.00029565217391304345,
      "loss": 0.712,
      "step": 137
    },
    {
      "epoch": 0.3018042646254784,
      "grad_norm": 0.046531159430742264,
      "learning_rate": 0.0002978260869565217,
      "loss": 0.6656,
      "step": 138
    },
    {
      "epoch": 0.3039912520503007,
      "grad_norm": 0.049203984439373016,
      "learning_rate": 0.0003,
      "loss": 0.7335,
      "step": 139
    },
    {
      "epoch": 0.306178239475123,
      "grad_norm": 0.0616229847073555,
      "learning_rate": 0.00029975728155339805,
      "loss": 0.7745,
      "step": 140
    },
    {
      "epoch": 0.30836522689994533,
      "grad_norm": 0.06660609692335129,
      "learning_rate": 0.0002995145631067961,
      "loss": 0.5531,
      "step": 141
    },
    {
      "epoch": 0.31055221432476765,
      "grad_norm": 0.037682708352804184,
      "learning_rate": 0.00029927184466019415,
      "loss": 0.6551,
      "step": 142
    },
    {
      "epoch": 0.3127392017495899,
      "grad_norm": 0.07913398742675781,
      "learning_rate": 0.00029902912621359223,
      "loss": 0.6406,
      "step": 143
    },
    {
      "epoch": 0.31492618917441223,
      "grad_norm": 0.09453226625919342,
      "learning_rate": 0.00029878640776699025,
      "loss": 0.7127,
      "step": 144
    },
    {
      "epoch": 0.31711317659923455,
      "grad_norm": 0.07528267800807953,
      "learning_rate": 0.00029854368932038833,
      "loss": 0.7101,
      "step": 145
    },
    {
      "epoch": 0.31930016402405687,
      "grad_norm": 0.07698322087526321,
      "learning_rate": 0.0002983009708737864,
      "loss": 0.658,
      "step": 146
    },
    {
      "epoch": 0.3214871514488792,
      "grad_norm": 0.05745327100157738,
      "learning_rate": 0.00029805825242718443,
      "loss": 0.7634,
      "step": 147
    },
    {
      "epoch": 0.3236741388737015,
      "grad_norm": 0.09420588612556458,
      "learning_rate": 0.0002978155339805825,
      "loss": 0.6379,
      "step": 148
    },
    {
      "epoch": 0.32586112629852376,
      "grad_norm": 0.09700261056423187,
      "learning_rate": 0.0002975728155339806,
      "loss": 0.5634,
      "step": 149
    },
    {
      "epoch": 0.3280481137233461,
      "grad_norm": 0.0776243656873703,
      "learning_rate": 0.0002973300970873786,
      "loss": 0.6162,
      "step": 150
    },
    {
      "epoch": 0.3302351011481684,
      "grad_norm": 0.02564432844519615,
      "learning_rate": 0.0002970873786407767,
      "loss": 0.4994,
      "step": 151
    },
    {
      "epoch": 0.3324220885729907,
      "grad_norm": 0.05717935040593147,
      "learning_rate": 0.00029684466019417477,
      "loss": 0.6405,
      "step": 152
    },
    {
      "epoch": 0.33460907599781303,
      "grad_norm": 0.05358930304646492,
      "learning_rate": 0.0002966019417475728,
      "loss": 0.5294,
      "step": 153
    },
    {
      "epoch": 0.3367960634226353,
      "grad_norm": 0.09334524720907211,
      "learning_rate": 0.00029635922330097087,
      "loss": 0.7158,
      "step": 154
    },
    {
      "epoch": 0.3389830508474576,
      "grad_norm": 0.1500510573387146,
      "learning_rate": 0.00029611650485436894,
      "loss": 0.6285,
      "step": 155
    },
    {
      "epoch": 0.34117003827227993,
      "grad_norm": 0.07156921923160553,
      "learning_rate": 0.00029587378640776697,
      "loss": 0.5904,
      "step": 156
    },
    {
      "epoch": 0.34335702569710225,
      "grad_norm": 0.05520716309547424,
      "learning_rate": 0.00029563106796116505,
      "loss": 0.6419,
      "step": 157
    },
    {
      "epoch": 0.34554401312192456,
      "grad_norm": 0.060137733817100525,
      "learning_rate": 0.00029538834951456307,
      "loss": 0.6339,
      "step": 158
    },
    {
      "epoch": 0.3477310005467469,
      "grad_norm": 0.04418874531984329,
      "learning_rate": 0.00029514563106796115,
      "loss": 0.6582,
      "step": 159
    },
    {
      "epoch": 0.34991798797156914,
      "grad_norm": 0.14283375442028046,
      "learning_rate": 0.0002949029126213592,
      "loss": 0.7756,
      "step": 160
    },
    {
      "epoch": 0.35210497539639146,
      "grad_norm": 0.12531793117523193,
      "learning_rate": 0.00029466019417475725,
      "loss": 0.6434,
      "step": 161
    },
    {
      "epoch": 0.3542919628212138,
      "grad_norm": 0.06906326115131378,
      "learning_rate": 0.0002944174757281553,
      "loss": 0.6828,
      "step": 162
    },
    {
      "epoch": 0.3564789502460361,
      "grad_norm": 0.10274400562047958,
      "learning_rate": 0.00029417475728155335,
      "loss": 0.7356,
      "step": 163
    },
    {
      "epoch": 0.3586659376708584,
      "grad_norm": 0.07805956155061722,
      "learning_rate": 0.0002939320388349514,
      "loss": 0.7114,
      "step": 164
    },
    {
      "epoch": 0.3608529250956807,
      "grad_norm": 0.059253111481666565,
      "learning_rate": 0.0002936893203883495,
      "loss": 0.6082,
      "step": 165
    },
    {
      "epoch": 0.363039912520503,
      "grad_norm": 0.07108813524246216,
      "learning_rate": 0.00029344660194174753,
      "loss": 0.7756,
      "step": 166
    },
    {
      "epoch": 0.3652268999453253,
      "grad_norm": 0.09952510893344879,
      "learning_rate": 0.0002932038834951456,
      "loss": 0.6716,
      "step": 167
    },
    {
      "epoch": 0.36741388737014763,
      "grad_norm": 0.07966083288192749,
      "learning_rate": 0.0002929611650485437,
      "loss": 0.6009,
      "step": 168
    },
    {
      "epoch": 0.36960087479496995,
      "grad_norm": 0.0691203773021698,
      "learning_rate": 0.0002927184466019417,
      "loss": 0.5261,
      "step": 169
    },
    {
      "epoch": 0.3717878622197922,
      "grad_norm": 0.08406491577625275,
      "learning_rate": 0.0002924757281553398,
      "loss": 0.6501,
      "step": 170
    },
    {
      "epoch": 0.3739748496446145,
      "grad_norm": 0.08572190254926682,
      "learning_rate": 0.0002922330097087378,
      "loss": 0.7071,
      "step": 171
    },
    {
      "epoch": 0.37616183706943684,
      "grad_norm": 0.07649155706167221,
      "learning_rate": 0.0002919902912621359,
      "loss": 0.6682,
      "step": 172
    },
    {
      "epoch": 0.37834882449425916,
      "grad_norm": 0.06834893673658371,
      "learning_rate": 0.00029174757281553396,
      "loss": 0.6535,
      "step": 173
    },
    {
      "epoch": 0.3805358119190815,
      "grad_norm": 0.148598775267601,
      "learning_rate": 0.000291504854368932,
      "loss": 0.6417,
      "step": 174
    },
    {
      "epoch": 0.3827227993439038,
      "grad_norm": 0.09686613082885742,
      "learning_rate": 0.00029126213592233006,
      "loss": 0.6217,
      "step": 175
    },
    {
      "epoch": 0.38490978676872606,
      "grad_norm": 0.047472141683101654,
      "learning_rate": 0.00029101941747572814,
      "loss": 0.39,
      "step": 176
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 0.06221065670251846,
      "learning_rate": 0.00029077669902912616,
      "loss": 0.7205,
      "step": 177
    },
    {
      "epoch": 0.3892837616183707,
      "grad_norm": 0.061044104397296906,
      "learning_rate": 0.00029053398058252424,
      "loss": 0.6231,
      "step": 178
    },
    {
      "epoch": 0.391470749043193,
      "grad_norm": 0.23658694326877594,
      "learning_rate": 0.0002902912621359223,
      "loss": 0.7941,
      "step": 179
    },
    {
      "epoch": 0.39365773646801533,
      "grad_norm": 0.09449535608291626,
      "learning_rate": 0.00029004854368932034,
      "loss": 0.7093,
      "step": 180
    },
    {
      "epoch": 0.3958447238928376,
      "grad_norm": 0.07445142418146133,
      "learning_rate": 0.0002898058252427184,
      "loss": 0.5534,
      "step": 181
    },
    {
      "epoch": 0.3980317113176599,
      "grad_norm": 0.0452071838080883,
      "learning_rate": 0.0002895631067961165,
      "loss": 0.6028,
      "step": 182
    },
    {
      "epoch": 0.4002186987424822,
      "grad_norm": 0.0683695524930954,
      "learning_rate": 0.0002893203883495145,
      "loss": 0.3688,
      "step": 183
    },
    {
      "epoch": 0.40240568616730454,
      "grad_norm": 0.048305071890354156,
      "learning_rate": 0.0002890776699029126,
      "loss": 0.8294,
      "step": 184
    },
    {
      "epoch": 0.40459267359212686,
      "grad_norm": 0.08140576630830765,
      "learning_rate": 0.0002888349514563107,
      "loss": 0.7538,
      "step": 185
    },
    {
      "epoch": 0.4067796610169492,
      "grad_norm": 0.13340412080287933,
      "learning_rate": 0.0002885922330097087,
      "loss": 0.7636,
      "step": 186
    },
    {
      "epoch": 0.40896664844177144,
      "grad_norm": 0.04563393443822861,
      "learning_rate": 0.0002883495145631068,
      "loss": 0.6064,
      "step": 187
    },
    {
      "epoch": 0.41115363586659376,
      "grad_norm": 0.08804814517498016,
      "learning_rate": 0.00028810679611650486,
      "loss": 0.8036,
      "step": 188
    },
    {
      "epoch": 0.4133406232914161,
      "grad_norm": 0.08191797137260437,
      "learning_rate": 0.0002878640776699029,
      "loss": 0.7082,
      "step": 189
    },
    {
      "epoch": 0.4155276107162384,
      "grad_norm": 0.09029440581798553,
      "learning_rate": 0.00028762135922330096,
      "loss": 0.6588,
      "step": 190
    },
    {
      "epoch": 0.4177145981410607,
      "grad_norm": 0.09573402255773544,
      "learning_rate": 0.000287378640776699,
      "loss": 0.7273,
      "step": 191
    },
    {
      "epoch": 0.419901585565883,
      "grad_norm": 0.045617956668138504,
      "learning_rate": 0.00028713592233009706,
      "loss": 0.461,
      "step": 192
    },
    {
      "epoch": 0.4220885729907053,
      "grad_norm": 0.05199209973216057,
      "learning_rate": 0.00028689320388349513,
      "loss": 0.6877,
      "step": 193
    },
    {
      "epoch": 0.4242755604155276,
      "grad_norm": 0.05869835987687111,
      "learning_rate": 0.00028665048543689316,
      "loss": 0.4181,
      "step": 194
    },
    {
      "epoch": 0.4264625478403499,
      "grad_norm": 0.14079192280769348,
      "learning_rate": 0.00028640776699029124,
      "loss": 0.6978,
      "step": 195
    },
    {
      "epoch": 0.42864953526517224,
      "grad_norm": 0.09773832559585571,
      "learning_rate": 0.0002861650485436893,
      "loss": 0.6019,
      "step": 196
    },
    {
      "epoch": 0.4308365226899945,
      "grad_norm": 0.04800047725439072,
      "learning_rate": 0.00028592233009708734,
      "loss": 0.613,
      "step": 197
    },
    {
      "epoch": 0.4330235101148168,
      "grad_norm": 0.056679654866456985,
      "learning_rate": 0.0002856796116504854,
      "loss": 0.5245,
      "step": 198
    },
    {
      "epoch": 0.43521049753963914,
      "grad_norm": 0.04190812632441521,
      "learning_rate": 0.0002854368932038835,
      "loss": 0.9215,
      "step": 199
    },
    {
      "epoch": 0.43739748496446146,
      "grad_norm": 0.11672720313072205,
      "learning_rate": 0.0002851941747572815,
      "loss": 0.8228,
      "step": 200
    },
    {
      "epoch": 0.4395844723892838,
      "grad_norm": 0.05854491889476776,
      "learning_rate": 0.0002849514563106796,
      "loss": 0.7991,
      "step": 201
    },
    {
      "epoch": 0.4417714598141061,
      "grad_norm": 0.06320830434560776,
      "learning_rate": 0.00028470873786407767,
      "loss": 0.6015,
      "step": 202
    },
    {
      "epoch": 0.44395844723892836,
      "grad_norm": 0.09820743650197983,
      "learning_rate": 0.0002844660194174757,
      "loss": 0.5632,
      "step": 203
    },
    {
      "epoch": 0.4461454346637507,
      "grad_norm": 0.10225464403629303,
      "learning_rate": 0.00028422330097087377,
      "loss": 0.8571,
      "step": 204
    },
    {
      "epoch": 0.448332422088573,
      "grad_norm": 0.0544356070458889,
      "learning_rate": 0.00028398058252427185,
      "loss": 0.804,
      "step": 205
    },
    {
      "epoch": 0.4505194095133953,
      "grad_norm": 0.10255508124828339,
      "learning_rate": 0.0002837378640776699,
      "loss": 0.6108,
      "step": 206
    },
    {
      "epoch": 0.4527063969382176,
      "grad_norm": 0.06616372615098953,
      "learning_rate": 0.00028349514563106795,
      "loss": 0.6481,
      "step": 207
    },
    {
      "epoch": 0.4548933843630399,
      "grad_norm": 0.0479828417301178,
      "learning_rate": 0.00028325242718446603,
      "loss": 0.6919,
      "step": 208
    },
    {
      "epoch": 0.4570803717878622,
      "grad_norm": 0.06924855709075928,
      "learning_rate": 0.00028300970873786405,
      "loss": 0.705,
      "step": 209
    },
    {
      "epoch": 0.4592673592126845,
      "grad_norm": 0.03697100654244423,
      "learning_rate": 0.00028276699029126213,
      "loss": 0.7116,
      "step": 210
    },
    {
      "epoch": 0.46145434663750684,
      "grad_norm": 0.09638485312461853,
      "learning_rate": 0.00028252427184466015,
      "loss": 0.7317,
      "step": 211
    },
    {
      "epoch": 0.46364133406232916,
      "grad_norm": 0.06610197573900223,
      "learning_rate": 0.00028228155339805823,
      "loss": 0.6847,
      "step": 212
    },
    {
      "epoch": 0.4658283214871515,
      "grad_norm": 0.13233628869056702,
      "learning_rate": 0.0002820388349514563,
      "loss": 0.6703,
      "step": 213
    },
    {
      "epoch": 0.46801530891197374,
      "grad_norm": 0.1086718887090683,
      "learning_rate": 0.00028179611650485433,
      "loss": 0.7942,
      "step": 214
    },
    {
      "epoch": 0.47020229633679606,
      "grad_norm": 0.06878714263439178,
      "learning_rate": 0.0002815533980582524,
      "loss": 0.5508,
      "step": 215
    },
    {
      "epoch": 0.4723892837616184,
      "grad_norm": 0.08610357344150543,
      "learning_rate": 0.00028131067961165043,
      "loss": 0.6846,
      "step": 216
    },
    {
      "epoch": 0.4745762711864407,
      "grad_norm": 0.11769182235002518,
      "learning_rate": 0.0002810679611650485,
      "loss": 0.686,
      "step": 217
    },
    {
      "epoch": 0.476763258611263,
      "grad_norm": 0.060380857437849045,
      "learning_rate": 0.0002808252427184466,
      "loss": 0.627,
      "step": 218
    },
    {
      "epoch": 0.47895024603608527,
      "grad_norm": 0.07884469628334045,
      "learning_rate": 0.0002805825242718446,
      "loss": 0.6379,
      "step": 219
    },
    {
      "epoch": 0.4811372334609076,
      "grad_norm": 0.0553802065551281,
      "learning_rate": 0.0002803398058252427,
      "loss": 1.234,
      "step": 220
    },
    {
      "epoch": 0.4833242208857299,
      "grad_norm": 0.053239986300468445,
      "learning_rate": 0.0002800970873786407,
      "loss": 0.6463,
      "step": 221
    },
    {
      "epoch": 0.4855112083105522,
      "grad_norm": 0.09387147426605225,
      "learning_rate": 0.0002798543689320388,
      "loss": 0.8651,
      "step": 222
    },
    {
      "epoch": 0.48769819573537454,
      "grad_norm": 0.08428589254617691,
      "learning_rate": 0.00027961165048543687,
      "loss": 0.4774,
      "step": 223
    },
    {
      "epoch": 0.4898851831601968,
      "grad_norm": 0.09616990387439728,
      "learning_rate": 0.0002793689320388349,
      "loss": 0.5953,
      "step": 224
    },
    {
      "epoch": 0.4920721705850191,
      "grad_norm": 0.12636440992355347,
      "learning_rate": 0.00027912621359223297,
      "loss": 0.5935,
      "step": 225
    },
    {
      "epoch": 0.49425915800984144,
      "grad_norm": 0.04346989095211029,
      "learning_rate": 0.00027888349514563105,
      "loss": 0.6199,
      "step": 226
    },
    {
      "epoch": 0.49644614543466375,
      "grad_norm": 0.07197440415620804,
      "learning_rate": 0.00027864077669902907,
      "loss": 0.6705,
      "step": 227
    },
    {
      "epoch": 0.4986331328594861,
      "grad_norm": 0.08746636658906937,
      "learning_rate": 0.00027839805825242715,
      "loss": 0.8351,
      "step": 228
    },
    {
      "epoch": 0.5008201202843083,
      "grad_norm": 0.1399545520544052,
      "learning_rate": 0.0002781553398058252,
      "loss": 0.7487,
      "step": 229
    },
    {
      "epoch": 0.5030071077091307,
      "grad_norm": 0.0729706659913063,
      "learning_rate": 0.00027791262135922325,
      "loss": 0.7952,
      "step": 230
    },
    {
      "epoch": 0.505194095133953,
      "grad_norm": 0.059242866933345795,
      "learning_rate": 0.0002776699029126213,
      "loss": 0.6364,
      "step": 231
    },
    {
      "epoch": 0.5073810825587752,
      "grad_norm": 0.09419375658035278,
      "learning_rate": 0.0002774271844660194,
      "loss": 0.8293,
      "step": 232
    },
    {
      "epoch": 0.5095680699835976,
      "grad_norm": 0.06085381284356117,
      "learning_rate": 0.0002771844660194174,
      "loss": 0.8756,
      "step": 233
    },
    {
      "epoch": 0.5117550574084199,
      "grad_norm": 0.05615559592843056,
      "learning_rate": 0.0002769417475728155,
      "loss": 0.618,
      "step": 234
    },
    {
      "epoch": 0.5139420448332422,
      "grad_norm": 0.07931798696517944,
      "learning_rate": 0.0002766990291262136,
      "loss": 0.7047,
      "step": 235
    },
    {
      "epoch": 0.5161290322580645,
      "grad_norm": 0.046281419694423676,
      "learning_rate": 0.0002764563106796116,
      "loss": 0.4596,
      "step": 236
    },
    {
      "epoch": 0.5183160196828869,
      "grad_norm": 0.04308830946683884,
      "learning_rate": 0.0002762135922330097,
      "loss": 0.6457,
      "step": 237
    },
    {
      "epoch": 0.5205030071077091,
      "grad_norm": 0.0868159681558609,
      "learning_rate": 0.00027597087378640776,
      "loss": 0.7541,
      "step": 238
    },
    {
      "epoch": 0.5226899945325314,
      "grad_norm": 0.05126407369971275,
      "learning_rate": 0.0002757281553398058,
      "loss": 0.6661,
      "step": 239
    },
    {
      "epoch": 0.5248769819573538,
      "grad_norm": 0.0836605578660965,
      "learning_rate": 0.00027548543689320386,
      "loss": 0.7741,
      "step": 240
    },
    {
      "epoch": 0.527063969382176,
      "grad_norm": 0.10537996888160706,
      "learning_rate": 0.00027524271844660194,
      "loss": 0.5839,
      "step": 241
    },
    {
      "epoch": 0.5292509568069984,
      "grad_norm": 0.06915068626403809,
      "learning_rate": 0.00027499999999999996,
      "loss": 0.7563,
      "step": 242
    },
    {
      "epoch": 0.5314379442318207,
      "grad_norm": 0.05846241116523743,
      "learning_rate": 0.00027475728155339804,
      "loss": 0.6107,
      "step": 243
    },
    {
      "epoch": 0.5336249316566429,
      "grad_norm": 0.07253163307905197,
      "learning_rate": 0.0002745145631067961,
      "loss": 0.6413,
      "step": 244
    },
    {
      "epoch": 0.5358119190814653,
      "grad_norm": 0.0637165755033493,
      "learning_rate": 0.00027427184466019414,
      "loss": 0.6918,
      "step": 245
    },
    {
      "epoch": 0.5379989065062876,
      "grad_norm": 0.04841014742851257,
      "learning_rate": 0.0002740291262135922,
      "loss": 0.4336,
      "step": 246
    },
    {
      "epoch": 0.5401858939311099,
      "grad_norm": 0.07899260520935059,
      "learning_rate": 0.0002737864077669903,
      "loss": 1.1465,
      "step": 247
    },
    {
      "epoch": 0.5423728813559322,
      "grad_norm": 0.128900945186615,
      "learning_rate": 0.0002735436893203883,
      "loss": 0.7688,
      "step": 248
    },
    {
      "epoch": 0.5445598687807545,
      "grad_norm": 0.09204466640949249,
      "learning_rate": 0.0002733009708737864,
      "loss": 0.7298,
      "step": 249
    },
    {
      "epoch": 0.5467468562055768,
      "grad_norm": 0.08942551165819168,
      "learning_rate": 0.0002730582524271845,
      "loss": 0.6038,
      "step": 250
    },
    {
      "epoch": 0.5489338436303991,
      "grad_norm": 0.11410896480083466,
      "learning_rate": 0.0002728155339805825,
      "loss": 0.6325,
      "step": 251
    },
    {
      "epoch": 0.5511208310552215,
      "grad_norm": 0.04977932199835777,
      "learning_rate": 0.0002725728155339806,
      "loss": 0.6421,
      "step": 252
    },
    {
      "epoch": 0.5533078184800437,
      "grad_norm": 0.05651041865348816,
      "learning_rate": 0.00027233009708737865,
      "loss": 0.5886,
      "step": 253
    },
    {
      "epoch": 0.555494805904866,
      "grad_norm": 0.056123483926057816,
      "learning_rate": 0.0002720873786407767,
      "loss": 0.5284,
      "step": 254
    },
    {
      "epoch": 0.5576817933296884,
      "grad_norm": 0.05689515545964241,
      "learning_rate": 0.00027184466019417475,
      "loss": 0.6985,
      "step": 255
    },
    {
      "epoch": 0.5598687807545106,
      "grad_norm": 0.05087985843420029,
      "learning_rate": 0.0002716019417475728,
      "loss": 0.5622,
      "step": 256
    },
    {
      "epoch": 0.562055768179333,
      "grad_norm": 0.03652990981936455,
      "learning_rate": 0.00027135922330097086,
      "loss": 0.3838,
      "step": 257
    },
    {
      "epoch": 0.5642427556041553,
      "grad_norm": 0.20960339903831482,
      "learning_rate": 0.00027111650485436893,
      "loss": 0.7656,
      "step": 258
    },
    {
      "epoch": 0.5664297430289775,
      "grad_norm": 0.05671091377735138,
      "learning_rate": 0.00027087378640776696,
      "loss": 0.6568,
      "step": 259
    },
    {
      "epoch": 0.5686167304537999,
      "grad_norm": 0.07829181104898453,
      "learning_rate": 0.00027063106796116503,
      "loss": 0.6861,
      "step": 260
    },
    {
      "epoch": 0.5708037178786222,
      "grad_norm": 0.10538700968027115,
      "learning_rate": 0.0002703883495145631,
      "loss": 0.911,
      "step": 261
    },
    {
      "epoch": 0.5729907053034445,
      "grad_norm": 0.03851666674017906,
      "learning_rate": 0.00027014563106796114,
      "loss": 0.5833,
      "step": 262
    },
    {
      "epoch": 0.5751776927282668,
      "grad_norm": 0.09947405755519867,
      "learning_rate": 0.0002699029126213592,
      "loss": 0.6924,
      "step": 263
    },
    {
      "epoch": 0.5773646801530892,
      "grad_norm": 0.05998876690864563,
      "learning_rate": 0.00026966019417475724,
      "loss": 0.6582,
      "step": 264
    },
    {
      "epoch": 0.5795516675779114,
      "grad_norm": 0.05930376797914505,
      "learning_rate": 0.0002694174757281553,
      "loss": 0.6135,
      "step": 265
    },
    {
      "epoch": 0.5817386550027337,
      "grad_norm": 0.06882154196500778,
      "learning_rate": 0.0002691747572815534,
      "loss": 0.6843,
      "step": 266
    },
    {
      "epoch": 0.5839256424275561,
      "grad_norm": 0.05111825838685036,
      "learning_rate": 0.0002689320388349514,
      "loss": 0.8738,
      "step": 267
    },
    {
      "epoch": 0.5861126298523783,
      "grad_norm": 0.03514356166124344,
      "learning_rate": 0.0002686893203883495,
      "loss": 0.5535,
      "step": 268
    },
    {
      "epoch": 0.5882996172772007,
      "grad_norm": 0.04661941900849342,
      "learning_rate": 0.0002684466019417475,
      "loss": 0.5437,
      "step": 269
    },
    {
      "epoch": 0.590486604702023,
      "grad_norm": 0.11490173637866974,
      "learning_rate": 0.0002682038834951456,
      "loss": 0.7309,
      "step": 270
    },
    {
      "epoch": 0.5926735921268452,
      "grad_norm": 0.06036648526787758,
      "learning_rate": 0.00026796116504854367,
      "loss": 0.7171,
      "step": 271
    },
    {
      "epoch": 0.5948605795516676,
      "grad_norm": 0.07145757973194122,
      "learning_rate": 0.0002677184466019417,
      "loss": 0.8972,
      "step": 272
    },
    {
      "epoch": 0.5970475669764899,
      "grad_norm": 0.03046463429927826,
      "learning_rate": 0.00026747572815533977,
      "loss": 0.6225,
      "step": 273
    },
    {
      "epoch": 0.5992345544013122,
      "grad_norm": 0.058935679495334625,
      "learning_rate": 0.00026723300970873785,
      "loss": 0.7951,
      "step": 274
    },
    {
      "epoch": 0.6014215418261345,
      "grad_norm": 0.08104430884122849,
      "learning_rate": 0.0002669902912621359,
      "loss": 0.7906,
      "step": 275
    },
    {
      "epoch": 0.6036085292509568,
      "grad_norm": 0.037242889404296875,
      "learning_rate": 0.00026674757281553395,
      "loss": 0.4116,
      "step": 276
    },
    {
      "epoch": 0.6057955166757791,
      "grad_norm": 0.06395541876554489,
      "learning_rate": 0.00026650485436893203,
      "loss": 0.5387,
      "step": 277
    },
    {
      "epoch": 0.6079825041006014,
      "grad_norm": 0.09105798602104187,
      "learning_rate": 0.00026626213592233005,
      "loss": 0.76,
      "step": 278
    },
    {
      "epoch": 0.6101694915254238,
      "grad_norm": 0.0888744667172432,
      "learning_rate": 0.00026601941747572813,
      "loss": 0.5783,
      "step": 279
    },
    {
      "epoch": 0.612356478950246,
      "grad_norm": 0.0881604477763176,
      "learning_rate": 0.0002657766990291262,
      "loss": 0.6175,
      "step": 280
    },
    {
      "epoch": 0.6145434663750683,
      "grad_norm": 0.0212717242538929,
      "learning_rate": 0.00026553398058252423,
      "loss": 0.6384,
      "step": 281
    },
    {
      "epoch": 0.6167304537998907,
      "grad_norm": 0.08296003937721252,
      "learning_rate": 0.0002652912621359223,
      "loss": 0.7919,
      "step": 282
    },
    {
      "epoch": 0.6189174412247129,
      "grad_norm": 0.05302100628614426,
      "learning_rate": 0.0002650485436893204,
      "loss": 0.7996,
      "step": 283
    },
    {
      "epoch": 0.6211044286495353,
      "grad_norm": 0.05207206308841705,
      "learning_rate": 0.0002648058252427184,
      "loss": 0.5176,
      "step": 284
    },
    {
      "epoch": 0.6232914160743576,
      "grad_norm": 0.08915773034095764,
      "learning_rate": 0.0002645631067961165,
      "loss": 0.7059,
      "step": 285
    },
    {
      "epoch": 0.6254784034991798,
      "grad_norm": 0.12500520050525665,
      "learning_rate": 0.00026432038834951456,
      "loss": 0.8228,
      "step": 286
    },
    {
      "epoch": 0.6276653909240022,
      "grad_norm": 0.04379987716674805,
      "learning_rate": 0.0002640776699029126,
      "loss": 0.5246,
      "step": 287
    },
    {
      "epoch": 0.6298523783488245,
      "grad_norm": 0.10752689838409424,
      "learning_rate": 0.00026383495145631067,
      "loss": 0.7095,
      "step": 288
    },
    {
      "epoch": 0.6320393657736468,
      "grad_norm": 0.12701405584812164,
      "learning_rate": 0.00026359223300970874,
      "loss": 0.7316,
      "step": 289
    },
    {
      "epoch": 0.6342263531984691,
      "grad_norm": 0.11033506691455841,
      "learning_rate": 0.00026334951456310677,
      "loss": 0.7736,
      "step": 290
    },
    {
      "epoch": 0.6364133406232915,
      "grad_norm": 0.04730546101927757,
      "learning_rate": 0.00026310679611650484,
      "loss": 0.5268,
      "step": 291
    },
    {
      "epoch": 0.6386003280481137,
      "grad_norm": 0.10472558438777924,
      "learning_rate": 0.0002628640776699029,
      "loss": 0.6933,
      "step": 292
    },
    {
      "epoch": 0.640787315472936,
      "grad_norm": 0.056678298860788345,
      "learning_rate": 0.00026262135922330094,
      "loss": 0.4571,
      "step": 293
    },
    {
      "epoch": 0.6429743028977584,
      "grad_norm": 0.07412777096033096,
      "learning_rate": 0.000262378640776699,
      "loss": 0.6422,
      "step": 294
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 0.08613819628953934,
      "learning_rate": 0.00026213592233009705,
      "loss": 0.7245,
      "step": 295
    },
    {
      "epoch": 0.647348277747403,
      "grad_norm": 0.0964793786406517,
      "learning_rate": 0.0002618932038834951,
      "loss": 0.7397,
      "step": 296
    },
    {
      "epoch": 0.6495352651722253,
      "grad_norm": 0.08231832832098007,
      "learning_rate": 0.0002616504854368932,
      "loss": 0.4,
      "step": 297
    },
    {
      "epoch": 0.6517222525970475,
      "grad_norm": 0.09483752399682999,
      "learning_rate": 0.0002614077669902912,
      "loss": 0.7464,
      "step": 298
    },
    {
      "epoch": 0.6539092400218699,
      "grad_norm": 0.041885074228048325,
      "learning_rate": 0.0002611650485436893,
      "loss": 0.4597,
      "step": 299
    },
    {
      "epoch": 0.6560962274466922,
      "grad_norm": 0.07590518891811371,
      "learning_rate": 0.0002609223300970874,
      "loss": 0.9338,
      "step": 300
    },
    {
      "epoch": 0.6582832148715145,
      "grad_norm": 0.06022942438721657,
      "learning_rate": 0.0002606796116504854,
      "loss": 0.5732,
      "step": 301
    },
    {
      "epoch": 0.6604702022963368,
      "grad_norm": 0.0570467971265316,
      "learning_rate": 0.0002604368932038835,
      "loss": 0.5997,
      "step": 302
    },
    {
      "epoch": 0.6626571897211591,
      "grad_norm": 0.028766008093953133,
      "learning_rate": 0.00026019417475728156,
      "loss": 0.7863,
      "step": 303
    },
    {
      "epoch": 0.6648441771459814,
      "grad_norm": 0.03608011081814766,
      "learning_rate": 0.0002599514563106796,
      "loss": 0.703,
      "step": 304
    },
    {
      "epoch": 0.6670311645708037,
      "grad_norm": 0.07903197407722473,
      "learning_rate": 0.00025970873786407766,
      "loss": 0.6879,
      "step": 305
    },
    {
      "epoch": 0.6692181519956261,
      "grad_norm": 0.04391651973128319,
      "learning_rate": 0.00025946601941747574,
      "loss": 0.6,
      "step": 306
    },
    {
      "epoch": 0.6714051394204483,
      "grad_norm": 0.0842578336596489,
      "learning_rate": 0.00025922330097087376,
      "loss": 0.7932,
      "step": 307
    },
    {
      "epoch": 0.6735921268452706,
      "grad_norm": 0.061180584132671356,
      "learning_rate": 0.00025898058252427184,
      "loss": 0.8001,
      "step": 308
    },
    {
      "epoch": 0.675779114270093,
      "grad_norm": 0.08094706386327744,
      "learning_rate": 0.00025873786407766986,
      "loss": 0.604,
      "step": 309
    },
    {
      "epoch": 0.6779661016949152,
      "grad_norm": 0.05069729685783386,
      "learning_rate": 0.00025849514563106794,
      "loss": 0.5355,
      "step": 310
    },
    {
      "epoch": 0.6801530891197376,
      "grad_norm": 0.0906616821885109,
      "learning_rate": 0.000258252427184466,
      "loss": 0.5386,
      "step": 311
    },
    {
      "epoch": 0.6823400765445599,
      "grad_norm": 0.02921673282980919,
      "learning_rate": 0.00025800970873786404,
      "loss": 0.4077,
      "step": 312
    },
    {
      "epoch": 0.6845270639693821,
      "grad_norm": 0.14991872012615204,
      "learning_rate": 0.0002577669902912621,
      "loss": 0.7154,
      "step": 313
    },
    {
      "epoch": 0.6867140513942045,
      "grad_norm": 0.08407143503427505,
      "learning_rate": 0.00025752427184466014,
      "loss": 0.7446,
      "step": 314
    },
    {
      "epoch": 0.6889010388190268,
      "grad_norm": 0.0366005077958107,
      "learning_rate": 0.0002572815533980582,
      "loss": 0.6288,
      "step": 315
    },
    {
      "epoch": 0.6910880262438491,
      "grad_norm": 0.07749815285205841,
      "learning_rate": 0.0002570388349514563,
      "loss": 0.7508,
      "step": 316
    },
    {
      "epoch": 0.6932750136686714,
      "grad_norm": 0.080388642847538,
      "learning_rate": 0.0002567961165048543,
      "loss": 0.6253,
      "step": 317
    },
    {
      "epoch": 0.6954620010934938,
      "grad_norm": 0.07014317810535431,
      "learning_rate": 0.0002565533980582524,
      "loss": 0.6952,
      "step": 318
    },
    {
      "epoch": 0.697648988518316,
      "grad_norm": 0.0520801767706871,
      "learning_rate": 0.0002563106796116505,
      "loss": 0.4726,
      "step": 319
    },
    {
      "epoch": 0.6998359759431383,
      "grad_norm": 0.07832513004541397,
      "learning_rate": 0.0002560679611650485,
      "loss": 0.7236,
      "step": 320
    },
    {
      "epoch": 0.7020229633679607,
      "grad_norm": 0.14095266163349152,
      "learning_rate": 0.0002558252427184466,
      "loss": 0.8119,
      "step": 321
    },
    {
      "epoch": 0.7042099507927829,
      "grad_norm": 0.06780201941728592,
      "learning_rate": 0.0002555825242718446,
      "loss": 0.5814,
      "step": 322
    },
    {
      "epoch": 0.7063969382176053,
      "grad_norm": 0.11610683053731918,
      "learning_rate": 0.0002553398058252427,
      "loss": 0.679,
      "step": 323
    },
    {
      "epoch": 0.7085839256424276,
      "grad_norm": 0.10824164003133774,
      "learning_rate": 0.00025509708737864075,
      "loss": 0.6732,
      "step": 324
    },
    {
      "epoch": 0.7107709130672498,
      "grad_norm": 0.02975596860051155,
      "learning_rate": 0.0002548543689320388,
      "loss": 0.5066,
      "step": 325
    },
    {
      "epoch": 0.7129579004920722,
      "grad_norm": 0.060648877173662186,
      "learning_rate": 0.00025461165048543686,
      "loss": 0.6598,
      "step": 326
    },
    {
      "epoch": 0.7151448879168945,
      "grad_norm": 0.06371823698282242,
      "learning_rate": 0.00025436893203883493,
      "loss": 0.8467,
      "step": 327
    },
    {
      "epoch": 0.7173318753417168,
      "grad_norm": 0.585040271282196,
      "learning_rate": 0.00025412621359223296,
      "loss": 0.6203,
      "step": 328
    },
    {
      "epoch": 0.7195188627665391,
      "grad_norm": 0.094622403383255,
      "learning_rate": 0.00025388349514563103,
      "loss": 0.7434,
      "step": 329
    },
    {
      "epoch": 0.7217058501913614,
      "grad_norm": 0.11974699050188065,
      "learning_rate": 0.0002536407766990291,
      "loss": 0.9787,
      "step": 330
    },
    {
      "epoch": 0.7238928376161837,
      "grad_norm": 0.06584473699331284,
      "learning_rate": 0.00025339805825242714,
      "loss": 0.5138,
      "step": 331
    },
    {
      "epoch": 0.726079825041006,
      "grad_norm": 0.0335596539080143,
      "learning_rate": 0.0002531553398058252,
      "loss": 0.5124,
      "step": 332
    },
    {
      "epoch": 0.7282668124658284,
      "grad_norm": 0.08706491440534592,
      "learning_rate": 0.0002529126213592233,
      "loss": 0.7463,
      "step": 333
    },
    {
      "epoch": 0.7304537998906506,
      "grad_norm": 0.06929214298725128,
      "learning_rate": 0.0002526699029126213,
      "loss": 0.4162,
      "step": 334
    },
    {
      "epoch": 0.7326407873154729,
      "grad_norm": 0.08074366301298141,
      "learning_rate": 0.0002524271844660194,
      "loss": 0.4847,
      "step": 335
    },
    {
      "epoch": 0.7348277747402953,
      "grad_norm": 0.04467281326651573,
      "learning_rate": 0.00025218446601941747,
      "loss": 0.8289,
      "step": 336
    },
    {
      "epoch": 0.7370147621651175,
      "grad_norm": 0.0674748346209526,
      "learning_rate": 0.0002519417475728155,
      "loss": 0.7378,
      "step": 337
    },
    {
      "epoch": 0.7392017495899399,
      "grad_norm": 0.05383702740073204,
| "learning_rate": 0.00025169902912621357, | |
| "loss": 0.6848, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.7413887370147622, | |
| "grad_norm": 0.08543651551008224, | |
| "learning_rate": 0.00025145631067961165, | |
| "loss": 0.8121, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.7435757244395844, | |
| "grad_norm": 0.06835243105888367, | |
| "learning_rate": 0.00025121359223300967, | |
| "loss": 0.7358, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.7457627118644068, | |
| "grad_norm": 0.0346585176885128, | |
| "learning_rate": 0.00025097087378640775, | |
| "loss": 0.5873, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.747949699289229, | |
| "grad_norm": 0.05856507271528244, | |
| "learning_rate": 0.0002507281553398058, | |
| "loss": 0.5853, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.7501366867140514, | |
| "grad_norm": 0.06167592853307724, | |
| "learning_rate": 0.00025048543689320385, | |
| "loss": 0.7931, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.7523236741388737, | |
| "grad_norm": 0.050117284059524536, | |
| "learning_rate": 0.00025024271844660193, | |
| "loss": 0.5862, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.7545106615636961, | |
| "grad_norm": 0.09028457850217819, | |
| "learning_rate": 0.00025, | |
| "loss": 0.8125, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.7566976489885183, | |
| "grad_norm": 0.07750166952610016, | |
| "learning_rate": 0.00024975728155339803, | |
| "loss": 0.8384, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.7588846364133406, | |
| "grad_norm": 0.11774775385856628, | |
| "learning_rate": 0.0002495145631067961, | |
| "loss": 0.5295, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.761071623838163, | |
| "grad_norm": 0.07647069543600082, | |
| "learning_rate": 0.0002492718446601942, | |
| "loss": 0.7767, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.7632586112629852, | |
| "grad_norm": 0.05447927862405777, | |
| "learning_rate": 0.0002490291262135922, | |
| "loss": 0.5385, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.7654455986878076, | |
| "grad_norm": 0.07965091615915298, | |
| "learning_rate": 0.0002487864077669903, | |
| "loss": 0.6024, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.7676325861126299, | |
| "grad_norm": 0.09056472778320312, | |
| "learning_rate": 0.00024854368932038836, | |
| "loss": 0.7804, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.7698195735374521, | |
| "grad_norm": 0.07823322713375092, | |
| "learning_rate": 0.0002483009708737864, | |
| "loss": 0.7167, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.7720065609622745, | |
| "grad_norm": 0.09253627061843872, | |
| "learning_rate": 0.00024805825242718446, | |
| "loss": 0.8845, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.7741935483870968, | |
| "grad_norm": 0.09817380458116531, | |
| "learning_rate": 0.00024781553398058254, | |
| "loss": 0.6951, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.7763805358119191, | |
| "grad_norm": 0.1380646675825119, | |
| "learning_rate": 0.00024757281553398056, | |
| "loss": 0.6207, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.7785675232367414, | |
| "grad_norm": 0.07183520495891571, | |
| "learning_rate": 0.00024733009708737864, | |
| "loss": 0.7533, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.7807545106615636, | |
| "grad_norm": 0.06690984219312668, | |
| "learning_rate": 0.00024708737864077667, | |
| "loss": 0.5925, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.782941498086386, | |
| "grad_norm": 0.06885010749101639, | |
| "learning_rate": 0.00024684466019417474, | |
| "loss": 0.6954, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.7851284855112083, | |
| "grad_norm": 0.12178938090801239, | |
| "learning_rate": 0.0002466019417475728, | |
| "loss": 0.7202, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.7873154729360307, | |
| "grad_norm": 0.05695571005344391, | |
| "learning_rate": 0.00024635922330097084, | |
| "loss": 0.4058, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.7895024603608529, | |
| "grad_norm": 0.0663481280207634, | |
| "learning_rate": 0.0002461165048543689, | |
| "loss": 0.611, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.7916894477856752, | |
| "grad_norm": 0.08184046298265457, | |
| "learning_rate": 0.00024587378640776695, | |
| "loss": 0.91, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.7938764352104976, | |
| "grad_norm": 0.09832902252674103, | |
| "learning_rate": 0.000245631067961165, | |
| "loss": 0.8614, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.7960634226353198, | |
| "grad_norm": 0.09554405510425568, | |
| "learning_rate": 0.0002453883495145631, | |
| "loss": 0.738, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.7982504100601422, | |
| "grad_norm": 0.042675334960222244, | |
| "learning_rate": 0.0002451456310679611, | |
| "loss": 0.5809, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.8004373974849645, | |
| "grad_norm": 0.17304395139217377, | |
| "learning_rate": 0.0002449029126213592, | |
| "loss": 0.7967, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.8026243849097867, | |
| "grad_norm": 0.10548993200063705, | |
| "learning_rate": 0.0002446601941747572, | |
| "loss": 0.673, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.8048113723346091, | |
| "grad_norm": 0.07851067185401917, | |
| "learning_rate": 0.0002444174757281553, | |
| "loss": 0.6461, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.8069983597594313, | |
| "grad_norm": 0.042184650897979736, | |
| "learning_rate": 0.0002441747572815534, | |
| "loss": 0.7225, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.8091853471842537, | |
| "grad_norm": 0.08041860163211823, | |
| "learning_rate": 0.00024393203883495143, | |
| "loss": 0.7501, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.811372334609076, | |
| "grad_norm": 0.11619728803634644, | |
| "learning_rate": 0.0002436893203883495, | |
| "loss": 0.5616, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.8135593220338984, | |
| "grad_norm": 0.07489950209856033, | |
| "learning_rate": 0.00024344660194174756, | |
| "loss": 0.4979, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.8157463094587206, | |
| "grad_norm": 0.1000688374042511, | |
| "learning_rate": 0.0002432038834951456, | |
| "loss": 0.8427, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.8179332968835429, | |
| "grad_norm": 0.12430839985609055, | |
| "learning_rate": 0.00024296116504854366, | |
| "loss": 0.6974, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.8201202843083653, | |
| "grad_norm": 0.04190218076109886, | |
| "learning_rate": 0.00024271844660194174, | |
| "loss": 0.6709, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.8223072717331875, | |
| "grad_norm": 0.04408605396747589, | |
| "learning_rate": 0.0002424757281553398, | |
| "loss": 0.7654, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.8244942591580099, | |
| "grad_norm": 0.09234176576137543, | |
| "learning_rate": 0.00024223300970873784, | |
| "loss": 0.6825, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.8266812465828322, | |
| "grad_norm": 0.04161737486720085, | |
| "learning_rate": 0.00024199029126213592, | |
| "loss": 0.5359, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.8288682340076544, | |
| "grad_norm": 0.0515722818672657, | |
| "learning_rate": 0.00024174757281553394, | |
| "loss": 0.7162, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.8310552214324768, | |
| "grad_norm": 0.028478797525167465, | |
| "learning_rate": 0.00024150485436893202, | |
| "loss": 0.531, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.833242208857299, | |
| "grad_norm": 0.06542545557022095, | |
| "learning_rate": 0.0002412621359223301, | |
| "loss": 0.726, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.8354291962821214, | |
| "grad_norm": 0.04673764482140541, | |
| "learning_rate": 0.00024101941747572812, | |
| "loss": 0.5919, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.8376161837069437, | |
| "grad_norm": 0.09057341516017914, | |
| "learning_rate": 0.0002407766990291262, | |
| "loss": 0.8387, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.839803171131766, | |
| "grad_norm": 0.06963503360748291, | |
| "learning_rate": 0.00024053398058252427, | |
| "loss": 0.5094, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.8419901585565883, | |
| "grad_norm": 0.10514622181653976, | |
| "learning_rate": 0.0002402912621359223, | |
| "loss": 0.7377, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.8441771459814106, | |
| "grad_norm": 0.15959374606609344, | |
| "learning_rate": 0.00024004854368932037, | |
| "loss": 0.5388, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.846364133406233, | |
| "grad_norm": 0.058087654411792755, | |
| "learning_rate": 0.00023980582524271845, | |
| "loss": 0.6291, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.8485511208310552, | |
| "grad_norm": 0.06005719676613808, | |
| "learning_rate": 0.00023956310679611648, | |
| "loss": 0.8149, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.8507381082558775, | |
| "grad_norm": 0.0970982164144516, | |
| "learning_rate": 0.00023932038834951455, | |
| "loss": 0.7297, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.8529250956806999, | |
| "grad_norm": 0.14135178923606873, | |
| "learning_rate": 0.0002390776699029126, | |
| "loss": 0.5565, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.8551120831055221, | |
| "grad_norm": 0.07173170149326324, | |
| "learning_rate": 0.00023883495145631065, | |
| "loss": 0.7412, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.8572990705303445, | |
| "grad_norm": 0.07636596262454987, | |
| "learning_rate": 0.00023859223300970873, | |
| "loss": 0.6125, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.8594860579551667, | |
| "grad_norm": 0.10493489354848862, | |
| "learning_rate": 0.00023834951456310678, | |
| "loss": 0.66, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.861673045379989, | |
| "grad_norm": 0.08531126379966736, | |
| "learning_rate": 0.00023810679611650483, | |
| "loss": 0.8011, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.8638600328048114, | |
| "grad_norm": 0.0813411995768547, | |
| "learning_rate": 0.0002378640776699029, | |
| "loss": 0.7221, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.8660470202296336, | |
| "grad_norm": 0.07029952853918076, | |
| "learning_rate": 0.00023762135922330096, | |
| "loss": 0.6196, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.868234007654456, | |
| "grad_norm": 0.05257638543844223, | |
| "learning_rate": 0.000237378640776699, | |
| "loss": 0.5878, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.8704209950792783, | |
| "grad_norm": 0.0511767715215683, | |
| "learning_rate": 0.00023713592233009706, | |
| "loss": 0.5656, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.8726079825041007, | |
| "grad_norm": 0.04756753519177437, | |
| "learning_rate": 0.0002368932038834951, | |
| "loss": 0.6717, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.8747949699289229, | |
| "grad_norm": 0.05377547815442085, | |
| "learning_rate": 0.0002366504854368932, | |
| "loss": 0.6772, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.8769819573537452, | |
| "grad_norm": 0.048965323716402054, | |
| "learning_rate": 0.00023640776699029124, | |
| "loss": 0.7175, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.8791689447785676, | |
| "grad_norm": 0.0994691550731659, | |
| "learning_rate": 0.0002361650485436893, | |
| "loss": 0.6364, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.8813559322033898, | |
| "grad_norm": 0.11497332155704498, | |
| "learning_rate": 0.00023592233009708734, | |
| "loss": 0.8192, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.8835429196282122, | |
| "grad_norm": 0.05721667408943176, | |
| "learning_rate": 0.00023567961165048542, | |
| "loss": 0.6384, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.8857299070530344, | |
| "grad_norm": 0.09783297032117844, | |
| "learning_rate": 0.00023543689320388347, | |
| "loss": 0.73, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.8879168944778567, | |
| "grad_norm": 0.06903696805238724, | |
| "learning_rate": 0.00023519417475728152, | |
| "loss": 0.6751, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.8901038819026791, | |
| "grad_norm": 0.031625282019376755, | |
| "learning_rate": 0.0002349514563106796, | |
| "loss": 0.4768, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.8922908693275013, | |
| "grad_norm": 0.09231697767972946, | |
| "learning_rate": 0.00023470873786407762, | |
| "loss": 0.5265, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.8944778567523237, | |
| "grad_norm": 0.06142432987689972, | |
| "learning_rate": 0.0002344660194174757, | |
| "loss": 0.7142, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.896664844177146, | |
| "grad_norm": 0.07211441546678543, | |
| "learning_rate": 0.00023422330097087378, | |
| "loss": 0.7355, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.8988518316019682, | |
| "grad_norm": 0.05186145380139351, | |
| "learning_rate": 0.0002339805825242718, | |
| "loss": 0.7309, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.9010388190267906, | |
| "grad_norm": 0.07550069689750671, | |
| "learning_rate": 0.00023373786407766988, | |
| "loss": 0.5684, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.9032258064516129, | |
| "grad_norm": 0.11441147327423096, | |
| "learning_rate": 0.00023349514563106795, | |
| "loss": 0.7721, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.9054127938764353, | |
| "grad_norm": 0.03658903390169144, | |
| "learning_rate": 0.00023325242718446598, | |
| "loss": 0.6225, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.9075997813012575, | |
| "grad_norm": 0.0618315227329731, | |
| "learning_rate": 0.00023300970873786406, | |
| "loss": 0.6044, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.9097867687260798, | |
| "grad_norm": 0.10633370280265808, | |
| "learning_rate": 0.00023276699029126213, | |
| "loss": 0.8217, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.9119737561509021, | |
| "grad_norm": 0.09450804442167282, | |
| "learning_rate": 0.00023252427184466016, | |
| "loss": 0.6532, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.9141607435757244, | |
| "grad_norm": 0.04320994019508362, | |
| "learning_rate": 0.00023228155339805823, | |
| "loss": 0.6539, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.9163477310005468, | |
| "grad_norm": 0.1247217059135437, | |
| "learning_rate": 0.00023203883495145629, | |
| "loss": 0.6525, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.918534718425369, | |
| "grad_norm": 0.06656447798013687, | |
| "learning_rate": 0.00023179611650485434, | |
| "loss": 0.6559, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.9207217058501913, | |
| "grad_norm": 0.06622477620840073, | |
| "learning_rate": 0.0002315533980582524, | |
| "loss": 0.8571, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.9229086932750137, | |
| "grad_norm": 0.07110453397035599, | |
| "learning_rate": 0.00023131067961165046, | |
| "loss": 0.6829, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.9250956806998359, | |
| "grad_norm": 0.09340423345565796, | |
| "learning_rate": 0.00023106796116504851, | |
| "loss": 0.5177, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.9272826681246583, | |
| "grad_norm": 0.08478957414627075, | |
| "learning_rate": 0.0002308252427184466, | |
| "loss": 0.5144, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.9294696555494806, | |
| "grad_norm": 0.08036354184150696, | |
| "learning_rate": 0.00023058252427184464, | |
| "loss": 0.5751, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.931656642974303, | |
| "grad_norm": 0.16781304776668549, | |
| "learning_rate": 0.0002303398058252427, | |
| "loss": 0.7269, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.9338436303991252, | |
| "grad_norm": 0.04996452108025551, | |
| "learning_rate": 0.00023009708737864074, | |
| "loss": 0.9213, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.9360306178239475, | |
| "grad_norm": 0.06372474879026413, | |
| "learning_rate": 0.00022985436893203882, | |
| "loss": 0.7192, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.9382176052487698, | |
| "grad_norm": 0.08853182196617126, | |
| "learning_rate": 0.00022961165048543687, | |
| "loss": 0.6885, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.9404045926735921, | |
| "grad_norm": 0.09669560939073563, | |
| "learning_rate": 0.00022936893203883492, | |
| "loss": 0.7755, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.9425915800984145, | |
| "grad_norm": 0.08254298567771912, | |
| "learning_rate": 0.000229126213592233, | |
| "loss": 0.6438, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.9447785675232367, | |
| "grad_norm": 0.05825486406683922, | |
| "learning_rate": 0.00022888349514563102, | |
| "loss": 0.6271, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.946965554948059, | |
| "grad_norm": 0.07293659448623657, | |
| "learning_rate": 0.0002286407766990291, | |
| "loss": 0.7427, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.9491525423728814, | |
| "grad_norm": 0.06341248005628586, | |
| "learning_rate": 0.00022839805825242718, | |
| "loss": 0.5967, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.9513395297977036, | |
| "grad_norm": 0.05031775310635567, | |
| "learning_rate": 0.0002281553398058252, | |
| "loss": 0.8006, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.953526517222526, | |
| "grad_norm": 0.07868911325931549, | |
| "learning_rate": 0.00022791262135922328, | |
| "loss": 0.7485, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.9557135046473483, | |
| "grad_norm": 0.048034172505140305, | |
| "learning_rate": 0.00022766990291262136, | |
| "loss": 0.5678, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.9579004920721705, | |
| "grad_norm": 0.24086016416549683, | |
| "learning_rate": 0.00022742718446601938, | |
| "loss": 0.5608, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.9600874794969929, | |
| "grad_norm": 0.11558078229427338, | |
| "learning_rate": 0.00022718446601941746, | |
| "loss": 0.6857, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.9622744669218152, | |
| "grad_norm": 0.0938619077205658, | |
| "learning_rate": 0.00022694174757281554, | |
| "loss": 0.7646, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.9644614543466375, | |
| "grad_norm": 0.06463375687599182, | |
| "learning_rate": 0.00022669902912621356, | |
| "loss": 0.5471, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.9666484417714598, | |
| "grad_norm": 0.09269668161869049, | |
| "learning_rate": 0.00022645631067961164, | |
| "loss": 0.599, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.9688354291962821, | |
| "grad_norm": 0.05435742810368538, | |
| "learning_rate": 0.0002262135922330097, | |
| "loss": 0.6006, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.9710224166211044, | |
| "grad_norm": 0.08216127008199692, | |
| "learning_rate": 0.00022597087378640774, | |
| "loss": 0.9168, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.9732094040459267, | |
| "grad_norm": 0.09296419471502304, | |
| "learning_rate": 0.00022572815533980582, | |
| "loss": 0.7166, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.9753963914707491, | |
| "grad_norm": 0.04655352607369423, | |
| "learning_rate": 0.00022548543689320387, | |
| "loss": 0.6178, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.9775833788955713, | |
| "grad_norm": 0.06913512200117111, | |
| "learning_rate": 0.00022524271844660192, | |
| "loss": 0.6967, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.9797703663203936, | |
| "grad_norm": 0.06842842698097229, | |
| "learning_rate": 0.000225, | |
| "loss": 0.6422, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.981957353745216, | |
| "grad_norm": 0.05165997892618179, | |
| "learning_rate": 0.00022475728155339804, | |
| "loss": 0.8705, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.9841443411700382, | |
| "grad_norm": 0.051989976316690445, | |
| "learning_rate": 0.0002245145631067961, | |
| "loss": 0.6084, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.9863313285948606, | |
| "grad_norm": 0.04412755370140076, | |
| "learning_rate": 0.00022427184466019415, | |
| "loss": 0.894, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.9885183160196829, | |
| "grad_norm": 0.0869666188955307, | |
| "learning_rate": 0.00022402912621359222, | |
| "loss": 0.8823, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.9907053034445052, | |
| "grad_norm": 0.09454749524593353, | |
| "learning_rate": 0.00022378640776699027, | |
| "loss": 0.777, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.9928922908693275, | |
| "grad_norm": 0.06162366643548012, | |
| "learning_rate": 0.00022354368932038832, | |
| "loss": 0.6904, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.9950792782941498, | |
| "grad_norm": 0.06763439625501633, | |
| "learning_rate": 0.0002233009708737864, | |
| "loss": 0.624, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.9972662657189721, | |
| "grad_norm": 0.09152118861675262, | |
| "learning_rate": 0.00022305825242718443, | |
| "loss": 0.8294, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.9994532531437944, | |
| "grad_norm": 0.09418030828237534, | |
| "learning_rate": 0.0002228155339805825, | |
| "loss": 0.6718, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.304161936044693, | |
| "learning_rate": 0.00022257281553398058, | |
| "loss": 0.7043, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.0021869874248224, | |
| "grad_norm": 0.0715775266289711, | |
| "learning_rate": 0.0002223300970873786, | |
| "loss": 0.8464, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.0043739748496445, | |
| "grad_norm": 0.07122276723384857, | |
| "learning_rate": 0.00022208737864077668, | |
| "loss": 0.9641, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.006560962274467, | |
| "grad_norm": 0.07358932495117188, | |
| "learning_rate": 0.00022184466019417476, | |
| "loss": 0.6549, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.0087479496992893, | |
| "grad_norm": 0.07040970772504807, | |
| "learning_rate": 0.00022160194174757278, | |
| "loss": 0.4505, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.0109349371241116, | |
| "grad_norm": 0.06049482896924019, | |
| "learning_rate": 0.00022135922330097086, | |
| "loss": 0.5927, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.0131219245489338, | |
| "grad_norm": 0.0695071890950203, | |
| "learning_rate": 0.00022111650485436894, | |
| "loss": 0.5383, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.0153089119737562, | |
| "grad_norm": 0.08132364600896835, | |
| "learning_rate": 0.00022087378640776696, | |
| "loss": 0.847, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.0174958993985785, | |
| "grad_norm": 0.05544696003198624, | |
| "learning_rate": 0.00022063106796116504, | |
| "loss": 0.4345, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.0196828868234007, | |
| "grad_norm": 0.05969703197479248, | |
| "learning_rate": 0.0002203883495145631, | |
| "loss": 0.4453, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.021869874248223, | |
| "grad_norm": 0.1177251935005188, | |
| "learning_rate": 0.00022014563106796114, | |
| "loss": 0.7104, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.0240568616730454, | |
| "grad_norm": 0.06920936703681946, | |
| "learning_rate": 0.00021990291262135922, | |
| "loss": 0.7134, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.0262438490978676, | |
| "grad_norm": 0.07872872799634933, | |
| "learning_rate": 0.00021966019417475727, | |
| "loss": 0.7264, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.02843083652269, | |
| "grad_norm": 0.07836636900901794, | |
| "learning_rate": 0.00021941747572815532, | |
| "loss": 0.67, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.0306178239475123, | |
| "grad_norm": 0.11644013226032257, | |
| "learning_rate": 0.00021917475728155337, | |
| "loss": 0.6805, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.0328048113723347, | |
| "grad_norm": 0.0698661059141159, | |
| "learning_rate": 0.00021893203883495145, | |
| "loss": 0.8073, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.0349917987971569, | |
| "grad_norm": 0.11226359009742737, | |
| "learning_rate": 0.0002186893203883495, | |
| "loss": 0.7748, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.0371787862219792, | |
| "grad_norm": 0.05350861698389053, | |
| "learning_rate": 0.00021844660194174755, | |
| "loss": 0.3718, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.0393657736468016, | |
| "grad_norm": 0.08481690287590027, | |
| "learning_rate": 0.00021820388349514562, | |
| "loss": 0.7288, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.0415527610716238, | |
| "grad_norm": 0.03595307841897011, | |
| "learning_rate": 0.00021796116504854368, | |
| "loss": 0.3986, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.0437397484964461, | |
| "grad_norm": 0.03737505525350571, | |
| "learning_rate": 0.00021771844660194173, | |
| "loss": 0.7487, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.0459267359212685, | |
| "grad_norm": 0.09312517195940018, | |
| "learning_rate": 0.0002174757281553398, | |
| "loss": 0.6514, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.0481137233460907, | |
| "grad_norm": 0.04990383982658386, | |
| "learning_rate": 0.00021723300970873783, | |
| "loss": 0.5012, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.050300710770913, | |
| "grad_norm": 0.051016416400671005, | |
| "learning_rate": 0.0002169902912621359, | |
| "loss": 0.6223, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.0524876981957354, | |
| "grad_norm": 0.10985545814037323, | |
| "learning_rate": 0.00021674757281553398, | |
| "loss": 0.7192, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.0546746856205578, | |
| "grad_norm": 0.13058781623840332, | |
| "learning_rate": 0.000216504854368932, | |
| "loss": 0.6935, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.05686167304538, | |
| "grad_norm": 0.08312558382749557, | |
| "learning_rate": 0.00021626213592233008, | |
| "loss": 0.7496, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.0590486604702023, | |
| "grad_norm": 0.1301640123128891, | |
| "learning_rate": 0.00021601941747572816, | |
| "loss": 0.6052, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.0612356478950247, | |
| "grad_norm": 0.09434215724468231, | |
| "learning_rate": 0.00021577669902912618, | |
| "loss": 0.7594, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.0634226353198468, | |
| "grad_norm": 0.10083501785993576, | |
| "learning_rate": 0.00021553398058252426, | |
| "loss": 0.7177, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.0656096227446692, | |
| "grad_norm": 0.055385932326316833, | |
| "learning_rate": 0.00021529126213592234, | |
| "loss": 0.5035, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.0677966101694916, | |
| "grad_norm": 0.04303668439388275, | |
| "learning_rate": 0.00021504854368932036, | |
| "loss": 0.6053, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.0699835975943137, | |
| "grad_norm": 0.10392434149980545, | |
| "learning_rate": 0.00021480582524271844, | |
| "loss": 0.7089, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.072170585019136, | |
| "grad_norm": 0.08201673626899719, | |
| "learning_rate": 0.0002145631067961165, | |
| "loss": 0.7632, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.0743575724439585, | |
| "grad_norm": 0.045085739344358444, | |
| "learning_rate": 0.00021432038834951454, | |
| "loss": 0.6817, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.0765445598687808, | |
| "grad_norm": 0.07135611772537231, | |
| "learning_rate": 0.00021407766990291262, | |
| "loss": 0.6935, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.078731547293603, | |
| "grad_norm": 0.05138889327645302, | |
| "learning_rate": 0.00021383495145631067, | |
| "loss": 0.4929, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.0809185347184254, | |
| "grad_norm": 0.08583348989486694, | |
| "learning_rate": 0.00021359223300970872, | |
| "loss": 0.5905, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.0831055221432477, | |
| "grad_norm": 0.04269842803478241, | |
| "learning_rate": 0.00021334951456310677, | |
| "loss": 0.5433, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.0852925095680699, | |
| "grad_norm": 0.08027777820825577, | |
| "learning_rate": 0.00021310679611650485, | |
| "loss": 0.6686, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.0874794969928923, | |
| "grad_norm": 0.03481656312942505, | |
| "learning_rate": 0.0002128640776699029, | |
| "loss": 0.6998, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.0896664844177146, | |
| "grad_norm": 0.07240082323551178, | |
| "learning_rate": 0.00021262135922330095, | |
| "loss": 0.607, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.091853471842537, | |
| "grad_norm": 0.05366026610136032, | |
| "learning_rate": 0.000212378640776699, | |
| "loss": 1.2503, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.0940404592673592, | |
| "grad_norm": 0.07942108064889908, | |
| "learning_rate": 0.00021213592233009705, | |
| "loss": 0.5248, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.0962274466921815, | |
| "grad_norm": 0.10274556279182434, | |
| "learning_rate": 0.00021189320388349513, | |
| "loss": 0.7508, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.098414434117004, | |
| "grad_norm": 0.0703059732913971, | |
| "learning_rate": 0.00021165048543689318, | |
| "loss": 0.7144, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.100601421541826, | |
| "grad_norm": 0.05571672320365906, | |
| "learning_rate": 0.00021140776699029123, | |
| "loss": 0.6688, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.1027884089666484, | |
| "grad_norm": 0.06484287977218628, | |
| "learning_rate": 0.0002111650485436893, | |
| "loss": 0.8054, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.1049753963914708, | |
| "grad_norm": 0.06399201601743698, | |
| "learning_rate": 0.00021092233009708736, | |
| "loss": 0.7235, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.1071623838162932, | |
| "grad_norm": 0.10094781219959259, | |
| "learning_rate": 0.0002106796116504854, | |
| "loss": 0.7598, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.1093493712411153, | |
| "grad_norm": 0.058417245745658875, | |
| "learning_rate": 0.00021043689320388349, | |
| "loss": 0.7093, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.1115363586659377, | |
| "grad_norm": 0.07317756116390228, | |
| "learning_rate": 0.0002101941747572815, | |
| "loss": 0.4897, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.11372334609076, | |
| "grad_norm": 0.06851299107074738, | |
| "learning_rate": 0.00020995145631067959, | |
| "loss": 0.6625, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.1159103335155822, | |
| "grad_norm": 0.02538035437464714, | |
| "learning_rate": 0.00020970873786407766, | |
| "loss": 0.5364, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.1180973209404046, | |
| "grad_norm": 0.030080309137701988, | |
| "learning_rate": 0.0002094660194174757, | |
| "loss": 0.5308, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.120284308365227, | |
| "grad_norm": 0.053607307374477386, | |
| "learning_rate": 0.00020922330097087376, | |
| "loss": 0.6043, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.1224712957900491, | |
| "grad_norm": 0.06043059006333351, | |
| "learning_rate": 0.00020898058252427184, | |
| "loss": 0.7792, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.1246582832148715, | |
| "grad_norm": 0.0692690908908844, | |
| "learning_rate": 0.00020873786407766987, | |
| "loss": 0.6497, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.1268452706396939, | |
| "grad_norm": 0.041172128170728683, | |
| "learning_rate": 0.00020849514563106794, | |
| "loss": 0.5175, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.129032258064516, | |
| "grad_norm": 0.030904235318303108, | |
| "learning_rate": 0.00020825242718446602, | |
| "loss": 0.7253, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.1312192454893384, | |
| "grad_norm": 0.10942153632640839, | |
| "learning_rate": 0.00020800970873786404, | |
| "loss": 0.486, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.1334062329141608, | |
| "grad_norm": 0.05069408193230629, | |
| "learning_rate": 0.00020776699029126212, | |
| "loss": 0.6905, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.1355932203389831, | |
| "grad_norm": 0.08336110413074493, | |
| "learning_rate": 0.00020752427184466017, | |
| "loss": 0.3925, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.1377802077638053, | |
| "grad_norm": 0.07145116478204727, | |
| "learning_rate": 0.00020728155339805822, | |
| "loss": 0.5243, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.1399671951886277, | |
| "grad_norm": 0.06341274827718735, | |
| "learning_rate": 0.0002070388349514563, | |
| "loss": 0.541, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.14215418261345, | |
| "grad_norm": 0.07657375186681747, | |
| "learning_rate": 0.00020679611650485435, | |
| "loss": 0.6905, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.1443411700382722, | |
| "grad_norm": 0.055964358150959015, | |
| "learning_rate": 0.0002065533980582524, | |
| "loss": 0.7425, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.1465281574630946, | |
| "grad_norm": 0.046841271221637726, | |
| "learning_rate": 0.00020631067961165045, | |
| "loss": 0.6434, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.148715144887917, | |
| "grad_norm": 0.05508289113640785, | |
| "learning_rate": 0.00020606796116504853, | |
| "loss": 0.4618, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.1509021323127393, | |
| "grad_norm": 0.08871844410896301, | |
| "learning_rate": 0.00020582524271844658, | |
| "loss": 0.5587, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.1530891197375615, | |
| "grad_norm": 0.07026297599077225, | |
| "learning_rate": 0.00020558252427184463, | |
| "loss": 0.4971, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.1552761071623838, | |
| "grad_norm": 0.07413173466920853, | |
| "learning_rate": 0.0002053398058252427, | |
| "loss": 0.7721, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.1574630945872062, | |
| "grad_norm": 0.11381928622722626, | |
| "learning_rate": 0.00020509708737864073, | |
| "loss": 0.578, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.1596500820120283, | |
| "grad_norm": 0.11267655342817307, | |
| "learning_rate": 0.0002048543689320388, | |
| "loss": 0.7266, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.1618370694368507, | |
| "grad_norm": 0.05957537144422531, | |
| "learning_rate": 0.0002046116504854369, | |
| "loss": 0.6555, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.164024056861673, | |
| "grad_norm": 0.07506536692380905, | |
| "learning_rate": 0.0002043689320388349, | |
| "loss": 0.5954, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.1662110442864955, | |
| "grad_norm": 0.04716431349515915, | |
| "learning_rate": 0.000204126213592233, | |
| "loss": 0.7473, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.1683980317113176, | |
| "grad_norm": 0.076003298163414, | |
| "learning_rate": 0.00020388349514563107, | |
| "loss": 0.7488, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.17058501913614, | |
| "grad_norm": 0.07676059007644653, | |
| "learning_rate": 0.0002036407766990291, | |
| "loss": 0.5341, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.1727720065609624, | |
| "grad_norm": 0.037800587713718414, | |
| "learning_rate": 0.00020339805825242717, | |
| "loss": 0.6851, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.1749589939857845, | |
| "grad_norm": 0.05885176360607147, | |
| "learning_rate": 0.00020315533980582524, | |
| "loss": 0.5632, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.177145981410607, | |
| "grad_norm": 0.048130929470062256, | |
| "learning_rate": 0.00020291262135922327, | |
| "loss": 0.6632, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.1793329688354293, | |
| "grad_norm": 0.19910819828510284, | |
| "learning_rate": 0.00020266990291262135, | |
| "loss": 0.6354, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.1815199562602514, | |
| "grad_norm": 0.07047896087169647, | |
| "learning_rate": 0.00020242718446601942, | |
| "loss": 0.8063, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.1837069436850738, | |
| "grad_norm": 0.11079243570566177, | |
| "learning_rate": 0.00020218446601941745, | |
| "loss": 0.6696, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.1858939311098962, | |
| "grad_norm": 0.0585208535194397, | |
| "learning_rate": 0.00020194174757281552, | |
| "loss": 0.5988, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.1880809185347183, | |
| "grad_norm": 0.13656085729599, | |
| "learning_rate": 0.00020169902912621357, | |
| "loss": 0.5899, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.1902679059595407, | |
| "grad_norm": 0.0442144051194191, | |
| "learning_rate": 0.00020145631067961163, | |
| "loss": 0.6737, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.192454893384363, | |
| "grad_norm": 0.050628334283828735, | |
| "learning_rate": 0.0002012135922330097, | |
| "loss": 0.5829, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.1946418808091854, | |
| "grad_norm": 0.0832197517156601, | |
| "learning_rate": 0.00020097087378640775, | |
| "loss": 0.6445, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.1968288682340076, | |
| "grad_norm": 0.05916838347911835, | |
| "learning_rate": 0.0002007281553398058, | |
| "loss": 0.646, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.19901585565883, | |
| "grad_norm": 0.04812471941113472, | |
| "learning_rate": 0.00020048543689320385, | |
| "loss": 0.6882, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.2012028430836523, | |
| "grad_norm": 0.09752511233091354, | |
| "learning_rate": 0.00020024271844660193, | |
| "loss": 0.6365, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.2033898305084745, | |
| "grad_norm": 0.07826195657253265, | |
| "learning_rate": 0.00019999999999999998, | |
| "loss": 0.6926, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.2055768179332969, | |
| "grad_norm": 0.07991279661655426, | |
| "learning_rate": 0.00019975728155339803, | |
| "loss": 0.8615, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.2077638053581192, | |
| "grad_norm": 0.06553139537572861, | |
| "learning_rate": 0.0001995145631067961, | |
| "loss": 0.648, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.2099507927829416, | |
| "grad_norm": 0.03314918279647827, | |
| "learning_rate": 0.00019927184466019413, | |
| "loss": 0.6059, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.2121377802077637, | |
| "grad_norm": 0.11380508542060852, | |
| "learning_rate": 0.0001990291262135922, | |
| "loss": 0.723, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.2143247676325861, | |
| "grad_norm": 0.07652546465396881, | |
| "learning_rate": 0.0001987864077669903, | |
| "loss": 0.7122, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.2165117550574085, | |
| "grad_norm": 0.03759431838989258, | |
| "learning_rate": 0.0001985436893203883, | |
| "loss": 0.367, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.2186987424822306, | |
| "grad_norm": 0.05255873501300812, | |
| "learning_rate": 0.0001983009708737864, | |
| "loss": 0.6797, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.220885729907053, | |
| "grad_norm": 0.051489852368831635, | |
| "learning_rate": 0.00019805825242718447, | |
| "loss": 0.5864, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.2230727173318754, | |
| "grad_norm": 0.11085233092308044, | |
| "learning_rate": 0.0001978155339805825, | |
| "loss": 0.5889, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.2252597047566978, | |
| "grad_norm": 0.06055936589837074, | |
| "learning_rate": 0.00019757281553398057, | |
| "loss": 0.6423, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.22744669218152, | |
| "grad_norm": 0.04101664200425148, | |
| "learning_rate": 0.00019733009708737865, | |
| "loss": 0.5727, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.2296336796063423, | |
| "grad_norm": 0.03171277046203613, | |
| "learning_rate": 0.00019708737864077667, | |
| "loss": 0.7185, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.2318206670311647, | |
| "grad_norm": 0.06788396835327148, | |
| "learning_rate": 0.00019684466019417475, | |
| "loss": 0.6122, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.2340076544559868, | |
| "grad_norm": 0.08057110011577606, | |
| "learning_rate": 0.0001966019417475728, | |
| "loss": 0.7807, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.2361946418808092, | |
| "grad_norm": 0.05691580846905708, | |
| "learning_rate": 0.00019635922330097085, | |
| "loss": 0.6171, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.2383816293056316, | |
| "grad_norm": 0.14753122627735138, | |
| "learning_rate": 0.00019611650485436893, | |
| "loss": 0.6721, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.2405686167304537, | |
| "grad_norm": 0.10009617358446121, | |
| "learning_rate": 0.00019587378640776698, | |
| "loss": 0.7494, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.242755604155276, | |
| "grad_norm": 0.07308393716812134, | |
| "learning_rate": 0.00019563106796116503, | |
| "loss": 0.6882, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.2449425915800985, | |
| "grad_norm": 0.062100425362586975, | |
| "learning_rate": 0.0001953883495145631, | |
| "loss": 0.8307, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.2471295790049206, | |
| "grad_norm": 0.06036095321178436, | |
| "learning_rate": 0.00019514563106796116, | |
| "loss": 0.6629, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.249316566429743, | |
| "grad_norm": 0.0912746712565422, | |
| "learning_rate": 0.0001949029126213592, | |
| "loss": 0.5981, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.2515035538545654, | |
| "grad_norm": 0.07584258913993835, | |
| "learning_rate": 0.00019466019417475726, | |
| "loss": 0.6035, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.2536905412793877, | |
| "grad_norm": 0.06942015886306763, | |
| "learning_rate": 0.00019441747572815533, | |
| "loss": 0.9066, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.2558775287042099, | |
| "grad_norm": 0.188585102558136, | |
| "learning_rate": 0.00019417475728155338, | |
| "loss": 0.5576, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.2580645161290323, | |
| "grad_norm": 0.06378776580095291, | |
| "learning_rate": 0.00019393203883495143, | |
| "loss": 0.7096, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.2602515035538546, | |
| "grad_norm": 0.093607597053051, | |
| "learning_rate": 0.0001936893203883495, | |
| "loss": 0.6168, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.2624384909786768, | |
| "grad_norm": 0.06908252835273743, | |
| "learning_rate": 0.00019344660194174754, | |
| "loss": 0.5409, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.2646254784034991, | |
| "grad_norm": 0.07703085243701935, | |
| "learning_rate": 0.00019320388349514561, | |
| "loss": 0.592, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.2668124658283215, | |
| "grad_norm": 0.068319171667099, | |
| "learning_rate": 0.0001929611650485437, | |
| "loss": 0.6522, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.268999453253144, | |
| "grad_norm": 0.04304174333810806, | |
| "learning_rate": 0.00019271844660194171, | |
| "loss": 0.8218, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.271186440677966, | |
| "grad_norm": 0.0900990441441536, | |
| "learning_rate": 0.0001924757281553398, | |
| "loss": 1.0345, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.2733734281027884, | |
| "grad_norm": 0.04922797903418541, | |
| "learning_rate": 0.00019223300970873787, | |
| "loss": 0.6132, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.2755604155276108, | |
| "grad_norm": 0.047148652374744415, | |
| "learning_rate": 0.0001919902912621359, | |
| "loss": 0.3725, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.277747402952433, | |
| "grad_norm": 0.2023891806602478, | |
| "learning_rate": 0.00019174757281553397, | |
| "loss": 0.5327, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.2799343903772553, | |
| "grad_norm": 0.11520843207836151, | |
| "learning_rate": 0.00019150485436893205, | |
| "loss": 0.8382, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.2821213778020777, | |
| "grad_norm": 0.07389979809522629, | |
| "learning_rate": 0.00019126213592233007, | |
| "loss": 0.568, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.2843083652269, | |
| "grad_norm": 0.055056143552064896, | |
| "learning_rate": 0.00019101941747572815, | |
| "loss": 0.6447, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.2864953526517222, | |
| "grad_norm": 0.06612139940261841, | |
| "learning_rate": 0.0001907766990291262, | |
| "loss": 0.5929, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.2886823400765446, | |
| "grad_norm": 0.0247971098870039, | |
| "learning_rate": 0.00019053398058252425, | |
| "loss": 0.5537, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.2908693275013667, | |
| "grad_norm": 0.15111826360225677, | |
| "learning_rate": 0.00019029126213592233, | |
| "loss": 0.7372, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.293056314926189, | |
| "grad_norm": 0.08081436157226562, | |
| "learning_rate": 0.00019004854368932038, | |
| "loss": 0.6935, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.2952433023510115, | |
| "grad_norm": 0.10907871276140213, | |
| "learning_rate": 0.00018980582524271843, | |
| "loss": 0.6666, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.2974302897758339, | |
| "grad_norm": 0.135869100689888, | |
| "learning_rate": 0.00018956310679611648, | |
| "loss": 0.6372, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.2996172772006562, | |
| "grad_norm": 0.034416068345308304, | |
| "learning_rate": 0.00018932038834951456, | |
| "loss": 0.4763, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.3018042646254784, | |
| "grad_norm": 0.12647610902786255, | |
| "learning_rate": 0.0001890776699029126, | |
| "loss": 0.8464, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.3039912520503008, | |
| "grad_norm": 0.057269513607025146, | |
| "learning_rate": 0.00018883495145631066, | |
| "loss": 0.5798, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.306178239475123, | |
| "grad_norm": 0.0600140281021595, | |
| "learning_rate": 0.00018859223300970874, | |
| "loss": 0.5804, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.3083652268999453, | |
| "grad_norm": 0.19099771976470947, | |
| "learning_rate": 0.0001883495145631068, | |
| "loss": 0.685, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.3105522143247677, | |
| "grad_norm": 0.050372760742902756, | |
| "learning_rate": 0.00018810679611650484, | |
| "loss": 0.5087, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.31273920174959, | |
| "grad_norm": 0.11331050843000412, | |
| "learning_rate": 0.00018786407766990291, | |
| "loss": 0.5442, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.3149261891744122, | |
| "grad_norm": 0.06795575469732285, | |
| "learning_rate": 0.00018762135922330094, | |
| "loss": 0.637, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.3171131765992345, | |
| "grad_norm": 0.16096124053001404, | |
| "learning_rate": 0.00018737864077669902, | |
| "loss": 0.696, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.319300164024057, | |
| "grad_norm": 0.06096538528800011, | |
| "learning_rate": 0.00018713592233009707, | |
| "loss": 0.6776, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.321487151448879, | |
| "grad_norm": 0.18116861581802368, | |
| "learning_rate": 0.00018689320388349512, | |
| "loss": 0.6609, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.3236741388737014, | |
| "grad_norm": 0.07999300211668015, | |
| "learning_rate": 0.0001866504854368932, | |
| "loss": 0.839, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.3258611262985238, | |
| "grad_norm": 0.06887108087539673, | |
| "learning_rate": 0.00018640776699029122, | |
| "loss": 0.4534, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.3280481137233462, | |
| "grad_norm": 0.06105407699942589, | |
| "learning_rate": 0.0001861650485436893, | |
| "loss": 0.5984, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.3302351011481683, | |
| "grad_norm": 0.07335031777620316, | |
| "learning_rate": 0.00018592233009708737, | |
| "loss": 0.8253, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.3324220885729907, | |
| "grad_norm": 0.0456896536052227, | |
| "learning_rate": 0.0001856796116504854, | |
| "loss": 0.7217, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.334609075997813, | |
| "grad_norm": 0.3200141191482544, | |
| "learning_rate": 0.00018543689320388347, | |
| "loss": 0.7451, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.3367960634226352, | |
| "grad_norm": 0.06651178002357483, | |
| "learning_rate": 0.00018519417475728155, | |
| "loss": 0.7326, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.3389830508474576, | |
| "grad_norm": 0.042020298540592194, | |
| "learning_rate": 0.00018495145631067957, | |
| "loss": 0.4071, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.34117003827228, | |
| "grad_norm": 0.07132259756326675, | |
| "learning_rate": 0.00018470873786407765, | |
| "loss": 0.7612, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.3433570256971024, | |
| "grad_norm": 0.11206962168216705, | |
| "learning_rate": 0.00018446601941747573, | |
| "loss": 0.6456, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.3455440131219245, | |
| "grad_norm": 0.05653239041566849, | |
| "learning_rate": 0.00018422330097087375, | |
| "loss": 0.8114, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.3477310005467469, | |
| "grad_norm": 0.14098510146141052, | |
| "learning_rate": 0.00018398058252427183, | |
| "loss": 0.7555, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.349917987971569, | |
| "grad_norm": 0.07498617470264435, | |
| "learning_rate": 0.00018373786407766988, | |
| "loss": 0.6413, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.3521049753963914, | |
| "grad_norm": 0.06883811205625534, | |
| "learning_rate": 0.00018349514563106793, | |
| "loss": 0.5517, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.3542919628212138, | |
| "grad_norm": 0.15621449053287506, | |
| "learning_rate": 0.000183252427184466, | |
| "loss": 0.7654, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.3564789502460362, | |
| "grad_norm": 0.10155853629112244, | |
| "learning_rate": 0.00018300970873786406, | |
| "loss": 0.7674, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.3586659376708585, | |
| "grad_norm": 0.11119658499956131, | |
| "learning_rate": 0.0001827669902912621, | |
| "loss": 0.8727, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.3608529250956807, | |
| "grad_norm": 0.08053936064243317, | |
| "learning_rate": 0.00018252427184466016, | |
| "loss": 0.7164, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.363039912520503, | |
| "grad_norm": 0.10677488148212433, | |
| "learning_rate": 0.00018228155339805824, | |
| "loss": 0.7041, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.3652268999453252, | |
| "grad_norm": 0.0674259215593338, | |
| "learning_rate": 0.0001820388349514563, | |
| "loss": 0.4037, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.3674138873701476, | |
| "grad_norm": 0.07473250478506088, | |
| "learning_rate": 0.00018179611650485434, | |
| "loss": 0.6386, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.36960087479497, | |
| "grad_norm": 0.1293839067220688, | |
| "learning_rate": 0.00018155339805825242, | |
| "loss": 0.6953, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.3717878622197923, | |
| "grad_norm": 0.11126802861690521, | |
| "learning_rate": 0.00018131067961165047, | |
| "loss": 0.4841, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.3739748496446145, | |
| "grad_norm": 0.053122710436582565, | |
| "learning_rate": 0.00018106796116504852, | |
| "loss": 0.6054, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.3761618370694368, | |
| "grad_norm": 0.10750580579042435, | |
| "learning_rate": 0.0001808252427184466, | |
| "loss": 0.6955, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.3783488244942592, | |
| "grad_norm": 0.10743537545204163, | |
| "learning_rate": 0.00018058252427184462, | |
| "loss": 0.6787, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.3805358119190814, | |
| "grad_norm": 0.04577861353754997, | |
| "learning_rate": 0.0001803398058252427, | |
| "loss": 0.6591, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.3827227993439037, | |
| "grad_norm": 0.09178069978952408, | |
| "learning_rate": 0.00018009708737864077, | |
| "loss": 0.6936, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.3849097867687261, | |
| "grad_norm": 0.1732717752456665, | |
| "learning_rate": 0.0001798543689320388, | |
| "loss": 0.7492, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.3870967741935485, | |
| "grad_norm": 0.054739419370889664, | |
| "learning_rate": 0.00017961165048543688, | |
| "loss": 0.5586, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.3892837616183706, | |
| "grad_norm": 0.11255872249603271, | |
| "learning_rate": 0.00017936893203883495, | |
| "loss": 0.636, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.391470749043193, | |
| "grad_norm": 0.14757251739501953, | |
| "learning_rate": 0.00017912621359223298, | |
| "loss": 0.5243, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.3936577364680154, | |
| "grad_norm": 0.044511694461107254, | |
| "learning_rate": 0.00017888349514563105, | |
| "loss": 0.4847, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.3958447238928375, | |
| "grad_norm": 0.09428581595420837, | |
| "learning_rate": 0.00017864077669902913, | |
| "loss": 1.1749, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.39803171131766, | |
| "grad_norm": 0.06652595102787018, | |
| "learning_rate": 0.00017839805825242716, | |
| "loss": 0.8209, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.4002186987424823, | |
| "grad_norm": 0.087835893034935, | |
| "learning_rate": 0.00017815533980582523, | |
| "loss": 0.5784, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.4024056861673047, | |
| "grad_norm": 0.09346538782119751, | |
| "learning_rate": 0.00017791262135922328, | |
| "loss": 0.5932, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.4045926735921268, | |
| "grad_norm": 0.10662012547254562, | |
| "learning_rate": 0.00017766990291262133, | |
| "loss": 0.6372, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.4067796610169492, | |
| "grad_norm": 0.11840318143367767, | |
| "learning_rate": 0.0001774271844660194, | |
| "loss": 0.6369, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.4089666484417713, | |
| "grad_norm": 0.06892179697751999, | |
| "learning_rate": 0.00017718446601941746, | |
| "loss": 0.5671, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.4111536358665937, | |
| "grad_norm": 0.10891327261924744, | |
| "learning_rate": 0.0001769417475728155, | |
| "loss": 0.9284, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.413340623291416, | |
| "grad_norm": 0.09791342914104462, | |
| "learning_rate": 0.00017669902912621356, | |
| "loss": 0.7292, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.4155276107162384, | |
| "grad_norm": 0.09937909245491028, | |
| "learning_rate": 0.00017645631067961164, | |
| "loss": 0.8155, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.4177145981410608, | |
| "grad_norm": 0.09038559347391129, | |
| "learning_rate": 0.0001762135922330097, | |
| "loss": 0.4662, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.419901585565883, | |
| "grad_norm": 0.058239519596099854, | |
| "learning_rate": 0.00017597087378640774, | |
| "loss": 0.6582, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.4220885729907053, | |
| "grad_norm": 0.0631416067481041, | |
| "learning_rate": 0.00017572815533980582, | |
| "loss": 0.695, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.4242755604155275, | |
| "grad_norm": 0.1288028061389923, | |
| "learning_rate": 0.00017548543689320387, | |
| "loss": 0.648, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.4264625478403499, | |
| "grad_norm": 0.03047804720699787, | |
| "learning_rate": 0.00017524271844660192, | |
| "loss": 0.5446, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.4286495352651722, | |
| "grad_norm": 0.060944609344005585, | |
| "learning_rate": 0.000175, | |
| "loss": 0.658, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.4308365226899946, | |
| "grad_norm": 0.056785643100738525, | |
| "learning_rate": 0.00017475728155339802, | |
| "loss": 0.5258, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.4330235101148168, | |
| "grad_norm": 0.058587562292814255, | |
| "learning_rate": 0.0001745145631067961, | |
| "loss": 0.6653, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.4352104975396391, | |
| "grad_norm": 0.22061674296855927, | |
| "learning_rate": 0.00017427184466019418, | |
| "loss": 0.7656, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.4373974849644615, | |
| "grad_norm": 0.06048043444752693, | |
| "learning_rate": 0.0001740291262135922, | |
| "loss": 0.7472, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.4395844723892837, | |
| "grad_norm": 0.10482992976903915, | |
| "learning_rate": 0.00017378640776699028, | |
| "loss": 0.6674, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.441771459814106, | |
| "grad_norm": 0.08470641821622849, | |
| "learning_rate": 0.00017354368932038836, | |
| "loss": 0.665, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.4439584472389284, | |
| "grad_norm": 0.11567983031272888, | |
| "learning_rate": 0.00017330097087378638, | |
| "loss": 0.7653, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.4461454346637508, | |
| "grad_norm": 0.09336879104375839, | |
| "learning_rate": 0.00017305825242718446, | |
| "loss": 0.897, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.448332422088573, | |
| "grad_norm": 0.1675858199596405, | |
| "learning_rate": 0.00017281553398058253, | |
| "loss": 0.829, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.4505194095133953, | |
| "grad_norm": 0.06564716249704361, | |
| "learning_rate": 0.00017257281553398056, | |
| "loss": 0.7922, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.4527063969382177, | |
| "grad_norm": 0.10239650309085846, | |
| "learning_rate": 0.00017233009708737864, | |
| "loss": 0.6929, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.4548933843630398, | |
| "grad_norm": 0.053650129586458206, | |
| "learning_rate": 0.00017208737864077669, | |
| "loss": 0.6141, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.4570803717878622, | |
| "grad_norm": 0.07837307453155518, | |
| "learning_rate": 0.00017184466019417474, | |
| "loss": 0.8482, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.4592673592126846, | |
| "grad_norm": 0.03846685588359833, | |
| "learning_rate": 0.00017160194174757281, | |
| "loss": 0.7327, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.461454346637507, | |
| "grad_norm": 0.059876035898923874, | |
| "learning_rate": 0.00017135922330097086, | |
| "loss": 0.8028, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.463641334062329, | |
| "grad_norm": 0.061472248286008835, | |
| "learning_rate": 0.00017111650485436891, | |
| "loss": 0.6939, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.4658283214871515, | |
| "grad_norm": 0.12179437279701233, | |
| "learning_rate": 0.00017087378640776697, | |
| "loss": 0.6649, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.4680153089119736, | |
| "grad_norm": 0.09186365455389023, | |
| "learning_rate": 0.00017063106796116504, | |
| "loss": 0.6011, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.470202296336796, | |
| "grad_norm": 0.05658876150846481, | |
| "learning_rate": 0.0001703883495145631, | |
| "loss": 0.6425, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.4723892837616184, | |
| "grad_norm": 0.08521269261837006, | |
| "learning_rate": 0.00017014563106796114, | |
| "loss": 0.6321, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.4745762711864407, | |
| "grad_norm": 0.05281525105237961, | |
| "learning_rate": 0.00016990291262135922, | |
| "loss": 0.6789, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.4767632586112631, | |
| "grad_norm": 0.08585121482610703, | |
| "learning_rate": 0.00016966019417475724, | |
| "loss": 0.6413, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.4789502460360853, | |
| "grad_norm": 0.07132274657487869, | |
| "learning_rate": 0.00016941747572815532, | |
| "loss": 0.9009, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.4811372334609076, | |
| "grad_norm": 0.06283555924892426, | |
| "learning_rate": 0.0001691747572815534, | |
| "loss": 0.5174, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.4833242208857298, | |
| "grad_norm": 0.12625133991241455, | |
| "learning_rate": 0.00016893203883495142, | |
| "loss": 0.7434, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.4855112083105522, | |
| "grad_norm": 0.08590378612279892, | |
| "learning_rate": 0.0001686893203883495, | |
| "loss": 0.6517, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.4876981957353745, | |
| "grad_norm": 0.07998991757631302, | |
| "learning_rate": 0.00016844660194174758, | |
| "loss": 0.8598, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.489885183160197, | |
| "grad_norm": 0.08209861814975739, | |
| "learning_rate": 0.0001682038834951456, | |
| "loss": 0.5312, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.492072170585019, | |
| "grad_norm": 0.08130457997322083, | |
| "learning_rate": 0.00016796116504854368, | |
| "loss": 0.6749, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.4942591580098414, | |
| "grad_norm": 0.06565511971712112, | |
| "learning_rate": 0.00016771844660194176, | |
| "loss": 0.5273, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.4964461454346638, | |
| "grad_norm": 0.03930141031742096, | |
| "learning_rate": 0.00016747572815533978, | |
| "loss": 0.7349, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.498633132859486, | |
| "grad_norm": 0.08142934739589691, | |
| "learning_rate": 0.00016723300970873786, | |
| "loss": 0.7705, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.5008201202843083, | |
| "grad_norm": 0.11566407233476639, | |
| "learning_rate": 0.0001669902912621359, | |
| "loss": 0.6753, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.5030071077091307, | |
| "grad_norm": 0.06479275226593018, | |
| "learning_rate": 0.00016674757281553396, | |
| "loss": 0.6477, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.505194095133953, | |
| "grad_norm": 0.07941816747188568, | |
| "learning_rate": 0.00016650485436893204, | |
| "loss": 0.7045, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.5073810825587752, | |
| "grad_norm": 0.12151027470827103, | |
| "learning_rate": 0.0001662621359223301, | |
| "loss": 0.6896, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.5095680699835976, | |
| "grad_norm": 0.08807893842458725, | |
| "learning_rate": 0.00016601941747572814, | |
| "loss": 0.6048, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.5117550574084198, | |
| "grad_norm": 0.03695622459053993, | |
| "learning_rate": 0.00016577669902912622, | |
| "loss": 0.6582, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.5139420448332421, | |
| "grad_norm": 0.05755792185664177, | |
| "learning_rate": 0.00016553398058252427, | |
| "loss": 0.6326, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.5161290322580645, | |
| "grad_norm": 0.1037437841296196, | |
| "learning_rate": 0.00016529126213592232, | |
| "loss": 0.6608, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.5183160196828869, | |
| "grad_norm": 0.0674048662185669, | |
| "learning_rate": 0.00016504854368932037, | |
| "loss": 0.5077, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.5205030071077092, | |
| "grad_norm": 0.1347750574350357, | |
| "learning_rate": 0.00016480582524271844, | |
| "loss": 0.6933, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.5226899945325314, | |
| "grad_norm": 0.05024397000670433, | |
| "learning_rate": 0.0001645631067961165, | |
| "loss": 0.7558, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.5248769819573538, | |
| "grad_norm": 0.0631837323307991, | |
| "learning_rate": 0.00016432038834951455, | |
| "loss": 0.5842, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.527063969382176, | |
| "grad_norm": 0.09257268905639648, | |
| "learning_rate": 0.00016407766990291262, | |
| "loss": 0.6996, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.5292509568069983, | |
| "grad_norm": 0.11887294054031372, | |
| "learning_rate": 0.00016383495145631065, | |
| "loss": 0.5101, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.5314379442318207, | |
| "grad_norm": 0.07113930583000183, | |
| "learning_rate": 0.00016359223300970872, | |
| "loss": 0.7546, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.533624931656643, | |
| "grad_norm": 0.10183428227901459, | |
| "learning_rate": 0.0001633495145631068, | |
| "loss": 0.6312, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.5358119190814654, | |
| "grad_norm": 0.06275193393230438, | |
| "learning_rate": 0.00016310679611650483, | |
| "loss": 0.5942, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.5379989065062876, | |
| "grad_norm": 0.06637417525053024, | |
| "learning_rate": 0.0001628640776699029, | |
| "loss": 0.6245, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.54018589393111, | |
| "grad_norm": 0.11270612478256226, | |
| "learning_rate": 0.00016262135922330098, | |
| "loss": 0.9069, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.542372881355932, | |
| "grad_norm": 0.07012913376092911, | |
| "learning_rate": 0.000162378640776699, | |
| "loss": 0.7436, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.5445598687807545, | |
| "grad_norm": 0.07748250663280487, | |
| "learning_rate": 0.00016213592233009708, | |
| "loss": 0.6251, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.5467468562055768, | |
| "grad_norm": 0.0590163953602314, | |
| "learning_rate": 0.0001618932038834951, | |
| "loss": 0.7188, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.5489338436303992, | |
| "grad_norm": 0.042455244809389114, | |
| "learning_rate": 0.00016165048543689318, | |
| "loss": 0.6997, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.5511208310552216, | |
| "grad_norm": 0.13864417374134064, | |
| "learning_rate": 0.00016140776699029126, | |
| "loss": 0.7744, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.5533078184800437, | |
| "grad_norm": 0.06101079657673836, | |
| "learning_rate": 0.00016116504854368928, | |
| "loss": 0.658, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.5554948059048659, | |
| "grad_norm": 0.07164673507213593, | |
| "learning_rate": 0.00016092233009708736, | |
| "loss": 0.5964, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.5576817933296883, | |
| "grad_norm": 0.1606537252664566, | |
| "learning_rate": 0.00016067961165048544, | |
| "loss": 0.7858, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.5598687807545106, | |
| "grad_norm": 0.06193002313375473, | |
| "learning_rate": 0.00016043689320388346, | |
| "loss": 0.9423, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.562055768179333, | |
| "grad_norm": 0.11733701825141907, | |
| "learning_rate": 0.00016019417475728154, | |
| "loss": 0.6538, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.5642427556041554, | |
| "grad_norm": 0.0617966391146183, | |
| "learning_rate": 0.00015995145631067962, | |
| "loss": 0.8811, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.5664297430289775, | |
| "grad_norm": 0.045651547610759735, | |
| "learning_rate": 0.00015970873786407764, | |
| "loss": 0.4535, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.5686167304538, | |
| "grad_norm": 0.05577806383371353, | |
| "learning_rate": 0.00015946601941747572, | |
| "loss": 0.8585, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.570803717878622, | |
| "grad_norm": 0.04707050323486328, | |
| "learning_rate": 0.00015922330097087377, | |
| "loss": 0.6141, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.5729907053034444, | |
| "grad_norm": 0.10123418271541595, | |
| "learning_rate": 0.00015898058252427182, | |
| "loss": 0.6322, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.5751776927282668, | |
| "grad_norm": 0.058243077248334885, | |
| "learning_rate": 0.0001587378640776699, | |
| "loss": 0.4855, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.5773646801530892, | |
| "grad_norm": 0.07926628738641739, | |
| "learning_rate": 0.00015849514563106795, | |
| "loss": 0.7209, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.5795516675779115, | |
| "grad_norm": 0.053030792623758316, | |
| "learning_rate": 0.000158252427184466, | |
| "loss": 0.5231, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.5817386550027337, | |
| "grad_norm": 0.10164280235767365, | |
| "learning_rate": 0.00015800970873786405, | |
| "loss": 0.6191, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.583925642427556, | |
| "grad_norm": 0.080946184694767, | |
| "learning_rate": 0.00015776699029126213, | |
| "loss": 0.4119, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.5861126298523782, | |
| "grad_norm": 0.10418912023305893, | |
| "learning_rate": 0.00015752427184466018, | |
| "loss": 0.6786, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.5882996172772006, | |
| "grad_norm": 0.08983733505010605, | |
| "learning_rate": 0.00015728155339805823, | |
| "loss": 0.7682, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.590486604702023, | |
| "grad_norm": 0.07781610637903214, | |
| "learning_rate": 0.0001570388349514563, | |
| "loss": 0.6476, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.5926735921268453, | |
| "grad_norm": 0.048014696687459946, | |
| "learning_rate": 0.00015679611650485433, | |
| "loss": 0.5202, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.5948605795516677, | |
| "grad_norm": 0.04506755992770195, | |
| "learning_rate": 0.0001565533980582524, | |
| "loss": 0.6491, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.5970475669764899, | |
| "grad_norm": 0.0790894404053688, | |
| "learning_rate": 0.00015631067961165048, | |
| "loss": 0.8726, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.5992345544013122, | |
| "grad_norm": 0.0461219921708107, | |
| "learning_rate": 0.0001560679611650485, | |
| "loss": 0.6627, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.6014215418261344, | |
| "grad_norm": 0.055332399904727936, | |
| "learning_rate": 0.00015582524271844658, | |
| "loss": 0.6403, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.6036085292509568, | |
| "grad_norm": 0.06580759584903717, | |
| "learning_rate": 0.00015558252427184466, | |
| "loss": 0.6241, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.6057955166757791, | |
| "grad_norm": 0.05455014482140541, | |
| "learning_rate": 0.00015533980582524269, | |
| "loss": 0.5521, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.6079825041006015, | |
| "grad_norm": 0.0879889652132988, | |
| "learning_rate": 0.00015509708737864076, | |
| "loss": 0.8286, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.6101694915254239, | |
| "grad_norm": 0.07845278829336166, | |
| "learning_rate": 0.00015485436893203884, | |
| "loss": 0.6807, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.612356478950246, | |
| "grad_norm": 0.1058611050248146, | |
| "learning_rate": 0.00015461165048543686, | |
| "loss": 0.7281, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.6145434663750682, | |
| "grad_norm": 0.09188025444746017, | |
| "learning_rate": 0.00015436893203883494, | |
| "loss": 0.9997, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.6167304537998906, | |
| "grad_norm": 0.07776019722223282, | |
| "learning_rate": 0.000154126213592233, | |
| "loss": 0.6304, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.618917441224713, | |
| "grad_norm": 0.11982621252536774, | |
| "learning_rate": 0.00015388349514563104, | |
| "loss": 0.7336, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.6211044286495353, | |
| "grad_norm": 0.13907882571220398, | |
| "learning_rate": 0.00015364077669902912, | |
| "loss": 0.7224, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.6232914160743577, | |
| "grad_norm": 0.08288092911243439, | |
| "learning_rate": 0.00015339805825242717, | |
| "loss": 0.6346, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.6254784034991798, | |
| "grad_norm": 0.07805322110652924, | |
| "learning_rate": 0.00015315533980582522, | |
| "loss": 0.7299, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.6276653909240022, | |
| "grad_norm": 0.09408170729875565, | |
| "learning_rate": 0.0001529126213592233, | |
| "loss": 0.7052, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.6298523783488243, | |
| "grad_norm": 0.05800126492977142, | |
| "learning_rate": 0.00015266990291262135, | |
| "loss": 0.7289, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.6320393657736467, | |
| "grad_norm": 0.07356402277946472, | |
| "learning_rate": 0.0001524271844660194, | |
| "loss": 0.7533, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.634226353198469, | |
| "grad_norm": 0.045158419758081436, | |
| "learning_rate": 0.00015218446601941745, | |
| "loss": 0.4564, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.6364133406232915, | |
| "grad_norm": 0.05181990936398506, | |
| "learning_rate": 0.00015194174757281553, | |
| "loss": 0.5431, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.6386003280481138, | |
| "grad_norm": 0.12281020730733871, | |
| "learning_rate": 0.00015169902912621358, | |
| "loss": 0.9977, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.640787315472936, | |
| "grad_norm": 0.10029571503400803, | |
| "learning_rate": 0.00015145631067961163, | |
| "loss": 0.4878, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.6429743028977584, | |
| "grad_norm": 0.27176743745803833, | |
| "learning_rate": 0.0001512135922330097, | |
| "loss": 0.4482, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.6451612903225805, | |
| "grad_norm": 0.07689711451530457, | |
| "learning_rate": 0.00015097087378640773, | |
| "loss": 0.6518, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.647348277747403, | |
| "grad_norm": 0.09448367357254028, | |
| "learning_rate": 0.0001507281553398058, | |
| "loss": 0.7245, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.6495352651722253, | |
| "grad_norm": 0.09228754788637161, | |
| "learning_rate": 0.00015048543689320389, | |
| "loss": 0.6763, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.6517222525970476, | |
| "grad_norm": 0.062209248542785645, | |
| "learning_rate": 0.0001502427184466019, | |
| "loss": 0.5551, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.65390924002187, | |
| "grad_norm": 0.13062815368175507, | |
| "learning_rate": 0.00015, | |
| "loss": 0.8073, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.6560962274466922, | |
| "grad_norm": 0.07445967942476273, | |
| "learning_rate": 0.00014975728155339804, | |
| "loss": 0.7943, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.6582832148715145, | |
| "grad_norm": 0.04933764040470123, | |
| "learning_rate": 0.00014951456310679611, | |
| "loss": 0.5679, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.6604702022963367, | |
| "grad_norm": 0.08024493604898453, | |
| "learning_rate": 0.00014927184466019417, | |
| "loss": 0.8256, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.662657189721159, | |
| "grad_norm": 0.07997776567935944, | |
| "learning_rate": 0.00014902912621359222, | |
| "loss": 0.651, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.6648441771459814, | |
| "grad_norm": 0.0992245227098465, | |
| "learning_rate": 0.0001487864077669903, | |
| "loss": 0.5129, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.6670311645708038, | |
| "grad_norm": 0.09031302481889725, | |
| "learning_rate": 0.00014854368932038834, | |
| "loss": 0.5117, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.6692181519956262, | |
| "grad_norm": 0.08211041986942291, | |
| "learning_rate": 0.0001483009708737864, | |
| "loss": 0.6673, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.6714051394204483, | |
| "grad_norm": 0.07226278632879257, | |
| "learning_rate": 0.00014805825242718447, | |
| "loss": 1.2885, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.6735921268452705, | |
| "grad_norm": 0.04525578394532204, | |
| "learning_rate": 0.00014781553398058252, | |
| "loss": 0.6017, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.6757791142700929, | |
| "grad_norm": 0.08551250398159027, | |
| "learning_rate": 0.00014757281553398057, | |
| "loss": 0.5044, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.6779661016949152, | |
| "grad_norm": 0.09782097488641739, | |
| "learning_rate": 0.00014733009708737862, | |
| "loss": 0.7231, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.6801530891197376, | |
| "grad_norm": 0.08225250244140625, | |
| "learning_rate": 0.00014708737864077667, | |
| "loss": 0.706, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.68234007654456, | |
| "grad_norm": 0.10632199048995972, | |
| "learning_rate": 0.00014684466019417475, | |
| "loss": 0.7661, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.6845270639693821, | |
| "grad_norm": 0.0609552338719368, | |
| "learning_rate": 0.0001466019417475728, | |
| "loss": 0.5343, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.6867140513942045, | |
| "grad_norm": 0.07229924201965332, | |
| "learning_rate": 0.00014635922330097085, | |
| "loss": 0.5003, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.6889010388190266, | |
| "grad_norm": 0.10183531045913696, | |
| "learning_rate": 0.0001461165048543689, | |
| "loss": 0.7977, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.691088026243849, | |
| "grad_norm": 0.05217605456709862, | |
| "learning_rate": 0.00014587378640776698, | |
| "loss": 0.6733, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.6932750136686714, | |
| "grad_norm": 0.14263714849948883, | |
| "learning_rate": 0.00014563106796116503, | |
| "loss": 0.7619, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.6954620010934938, | |
| "grad_norm": 0.07814032584428787, | |
| "learning_rate": 0.00014538834951456308, | |
| "loss": 0.7111, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.6976489885183161, | |
| "grad_norm": 0.10231511294841766, | |
| "learning_rate": 0.00014514563106796116, | |
| "loss": 0.7288, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.6998359759431383, | |
| "grad_norm": 0.08485434204339981, | |
| "learning_rate": 0.0001449029126213592, | |
| "loss": 0.6357, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.7020229633679607, | |
| "grad_norm": 0.07320071011781693, | |
| "learning_rate": 0.00014466019417475726, | |
| "loss": 0.6858, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.7042099507927828, | |
| "grad_norm": 0.059365514665842056, | |
| "learning_rate": 0.00014441747572815534, | |
| "loss": 0.5363, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.7063969382176052, | |
| "grad_norm": 0.18387210369110107, | |
| "learning_rate": 0.0001441747572815534, | |
| "loss": 0.377, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.7085839256424276, | |
| "grad_norm": 0.12494487315416336, | |
| "learning_rate": 0.00014393203883495144, | |
| "loss": 0.7818, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.71077091306725, | |
| "grad_norm": 0.06817726045846939, | |
| "learning_rate": 0.0001436893203883495, | |
| "loss": 0.6386, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.7129579004920723, | |
| "grad_norm": 0.24346807599067688, | |
| "learning_rate": 0.00014344660194174757, | |
| "loss": 0.6238, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.7151448879168945, | |
| "grad_norm": 0.11059845983982086, | |
| "learning_rate": 0.00014320388349514562, | |
| "loss": 0.6998, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.7173318753417168, | |
| "grad_norm": 0.08616719394922256, | |
| "learning_rate": 0.00014296116504854367, | |
| "loss": 0.6289, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.719518862766539, | |
| "grad_norm": 0.08878134191036224, | |
| "learning_rate": 0.00014271844660194175, | |
| "loss": 0.4928, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.7217058501913614, | |
| "grad_norm": 0.046061642467975616, | |
| "learning_rate": 0.0001424757281553398, | |
| "loss": 0.4056, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.7238928376161837, | |
| "grad_norm": 0.06339546293020248, | |
| "learning_rate": 0.00014223300970873785, | |
| "loss": 0.9644, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.726079825041006, | |
| "grad_norm": 0.08085539191961288, | |
| "learning_rate": 0.00014199029126213592, | |
| "loss": 0.589, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.7282668124658285, | |
| "grad_norm": 0.09716881811618805, | |
| "learning_rate": 0.00014174757281553398, | |
| "loss": 0.7105, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.7304537998906506, | |
| "grad_norm": 0.08055704832077026, | |
| "learning_rate": 0.00014150485436893203, | |
| "loss": 0.7049, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.7326407873154728, | |
| "grad_norm": 0.033506009727716446, | |
| "learning_rate": 0.00014126213592233008, | |
| "loss": 0.55, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.7348277747402951, | |
| "grad_norm": 0.08695266395807266, | |
| "learning_rate": 0.00014101941747572815, | |
| "loss": 0.8698, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.7370147621651175, | |
| "grad_norm": 0.10492417961359024, | |
| "learning_rate": 0.0001407766990291262, | |
| "loss": 0.5841, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.73920174958994, | |
| "grad_norm": 0.0260971337556839, | |
| "learning_rate": 0.00014053398058252425, | |
| "loss": 0.4715, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.7413887370147623, | |
| "grad_norm": 0.14263994991779327, | |
| "learning_rate": 0.0001402912621359223, | |
| "loss": 0.8341, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.7435757244395844, | |
| "grad_norm": 0.08548615872859955, | |
| "learning_rate": 0.00014004854368932036, | |
| "loss": 0.608, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.7457627118644068, | |
| "grad_norm": 0.11541998386383057, | |
| "learning_rate": 0.00013980582524271843, | |
| "loss": 0.8728, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.747949699289229, | |
| "grad_norm": 0.10113636404275894, | |
| "learning_rate": 0.00013956310679611648, | |
| "loss": 0.5703, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.7501366867140513, | |
| "grad_norm": 0.09393016993999481, | |
| "learning_rate": 0.00013932038834951453, | |
| "loss": 0.6279, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.7523236741388737, | |
| "grad_norm": 0.0811694860458374, | |
| "learning_rate": 0.0001390776699029126, | |
| "loss": 0.5184, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.754510661563696, | |
| "grad_norm": 0.10583005845546722, | |
| "learning_rate": 0.00013883495145631066, | |
| "loss": 0.6799, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.7566976489885184, | |
| "grad_norm": 0.09318964928388596, | |
| "learning_rate": 0.0001385922330097087, | |
| "loss": 0.8732, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.7588846364133406, | |
| "grad_norm": 0.07826653122901917, | |
| "learning_rate": 0.0001383495145631068, | |
| "loss": 0.8013, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.761071623838163, | |
| "grad_norm": 0.06191803142428398, | |
| "learning_rate": 0.00013810679611650484, | |
| "loss": 0.6102, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.763258611262985, | |
| "grad_norm": 0.08127430081367493, | |
| "learning_rate": 0.0001378640776699029, | |
| "loss": 1.0328, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.7654455986878075, | |
| "grad_norm": 0.046586234122514725, | |
| "learning_rate": 0.00013762135922330097, | |
| "loss": 0.8868, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.7676325861126299, | |
| "grad_norm": 0.08679065853357315, | |
| "learning_rate": 0.00013737864077669902, | |
| "loss": 0.4166, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.7698195735374522, | |
| "grad_norm": 0.11666709929704666, | |
| "learning_rate": 0.00013713592233009707, | |
| "loss": 0.7552, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.7720065609622746, | |
| "grad_norm": 0.08766624331474304, | |
| "learning_rate": 0.00013689320388349515, | |
| "loss": 0.7544, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.7741935483870968, | |
| "grad_norm": 0.04604002833366394, | |
| "learning_rate": 0.0001366504854368932, | |
| "loss": 0.6628, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.7763805358119191, | |
| "grad_norm": 0.08578313142061234, | |
| "learning_rate": 0.00013640776699029125, | |
| "loss": 0.5542, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.7785675232367413, | |
| "grad_norm": 0.07900494337081909, | |
| "learning_rate": 0.00013616504854368933, | |
| "loss": 0.8792, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.7807545106615636, | |
| "grad_norm": 0.08219628036022186, | |
| "learning_rate": 0.00013592233009708738, | |
| "loss": 0.6798, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.782941498086386, | |
| "grad_norm": 0.08442230522632599, | |
| "learning_rate": 0.00013567961165048543, | |
| "loss": 0.7634, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.7851284855112084, | |
| "grad_norm": 0.12211817502975464, | |
| "learning_rate": 0.00013543689320388348, | |
| "loss": 0.5944, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.7873154729360308, | |
| "grad_norm": 0.052023954689502716, | |
| "learning_rate": 0.00013519417475728156, | |
| "loss": 0.7413, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.789502460360853, | |
| "grad_norm": 0.05588607117533684, | |
| "learning_rate": 0.0001349514563106796, | |
| "loss": 0.6026, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.791689447785675, | |
| "grad_norm": 0.09938627481460571, | |
| "learning_rate": 0.00013470873786407766, | |
| "loss": 0.4907, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.7938764352104974, | |
| "grad_norm": 0.07085568457841873, | |
| "learning_rate": 0.0001344660194174757, | |
| "loss": 0.7812, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.7960634226353198, | |
| "grad_norm": 0.06302408128976822, | |
| "learning_rate": 0.00013422330097087376, | |
| "loss": 1.1212, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.7982504100601422, | |
| "grad_norm": 0.07288993149995804, | |
| "learning_rate": 0.00013398058252427184, | |
| "loss": 0.3592, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.8004373974849646, | |
| "grad_norm": 0.10824166238307953, | |
| "learning_rate": 0.00013373786407766989, | |
| "loss": 0.7594, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.8026243849097867, | |
| "grad_norm": 0.06682860851287842, | |
| "learning_rate": 0.00013349514563106794, | |
| "loss": 0.7621, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.804811372334609, | |
| "grad_norm": 0.058759741485118866, | |
| "learning_rate": 0.00013325242718446601, | |
| "loss": 0.5391, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.8069983597594312, | |
| "grad_norm": 0.06587909162044525, | |
| "learning_rate": 0.00013300970873786406, | |
| "loss": 0.8572, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.8091853471842536, | |
| "grad_norm": 0.08297627419233322, | |
| "learning_rate": 0.00013276699029126212, | |
| "loss": 0.5941, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.811372334609076, | |
| "grad_norm": 0.06561224162578583, | |
| "learning_rate": 0.0001325242718446602, | |
| "loss": 0.6561, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.8135593220338984, | |
| "grad_norm": 0.0836096704006195, | |
| "learning_rate": 0.00013228155339805824, | |
| "loss": 0.7184, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.8157463094587207, | |
| "grad_norm": 0.059412457048892975, | |
| "learning_rate": 0.0001320388349514563, | |
| "loss": 0.7218, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.8179332968835429, | |
| "grad_norm": 0.1909535527229309, | |
| "learning_rate": 0.00013179611650485437, | |
| "loss": 0.8877, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.8201202843083653, | |
| "grad_norm": 0.12529511749744415, | |
| "learning_rate": 0.00013155339805825242, | |
| "loss": 1.0633, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.8223072717331874, | |
| "grad_norm": 0.08508165180683136, | |
| "learning_rate": 0.00013131067961165047, | |
| "loss": 0.788, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.8244942591580098, | |
| "grad_norm": 0.13272306323051453, | |
| "learning_rate": 0.00013106796116504852, | |
| "loss": 0.7453, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.8266812465828322, | |
| "grad_norm": 0.08672458678483963, | |
| "learning_rate": 0.0001308252427184466, | |
| "loss": 0.5658, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.8288682340076545, | |
| "grad_norm": 0.05675327405333519, | |
| "learning_rate": 0.00013058252427184465, | |
| "loss": 0.7884, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.831055221432477, | |
| "grad_norm": 0.05675899609923363, | |
| "learning_rate": 0.0001303398058252427, | |
| "loss": 0.65, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.833242208857299, | |
| "grad_norm": 0.07699860632419586, | |
| "learning_rate": 0.00013009708737864078, | |
| "loss": 0.7413, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.8354291962821214, | |
| "grad_norm": 0.06006371229887009, | |
| "learning_rate": 0.00012985436893203883, | |
| "loss": 0.4697, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.8376161837069436, | |
| "grad_norm": 0.13671620190143585, | |
| "learning_rate": 0.00012961165048543688, | |
| "loss": 0.5431, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.839803171131766, | |
| "grad_norm": 0.07552420347929001, | |
| "learning_rate": 0.00012936893203883493, | |
| "loss": 0.6848, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.8419901585565883, | |
| "grad_norm": 0.07906556129455566, | |
| "learning_rate": 0.000129126213592233, | |
| "loss": 0.7319, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.8441771459814107, | |
| "grad_norm": 0.16908115148544312, | |
| "learning_rate": 0.00012888349514563106, | |
| "loss": 0.7233, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.846364133406233, | |
| "grad_norm": 0.05891953036189079, | |
| "learning_rate": 0.0001286407766990291, | |
| "loss": 0.469, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.8485511208310552, | |
| "grad_norm": 0.059181615710258484, | |
| "learning_rate": 0.00012839805825242716, | |
| "loss": 0.6132, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.8507381082558774, | |
| "grad_norm": 0.04733738303184509, | |
| "learning_rate": 0.00012815533980582524, | |
| "loss": 0.5983, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 1.8529250956806997, | |
| "grad_norm": 0.10546132922172546, | |
| "learning_rate": 0.0001279126213592233, | |
| "loss": 0.6189, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.8551120831055221, | |
| "grad_norm": 0.04662831500172615, | |
| "learning_rate": 0.00012766990291262134, | |
| "loss": 0.5541, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 1.8572990705303445, | |
| "grad_norm": 0.03423594310879707, | |
| "learning_rate": 0.0001274271844660194, | |
| "loss": 0.5169, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.8594860579551669, | |
| "grad_norm": 0.07898862659931183, | |
| "learning_rate": 0.00012718446601941747, | |
| "loss": 0.6811, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 1.861673045379989, | |
| "grad_norm": 0.06376733630895615, | |
| "learning_rate": 0.00012694174757281552, | |
| "loss": 0.6015, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.8638600328048114, | |
| "grad_norm": 0.11817888915538788, | |
| "learning_rate": 0.00012669902912621357, | |
| "loss": 0.7551, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 1.8660470202296335, | |
| "grad_norm": 0.04637736827135086, | |
| "learning_rate": 0.00012645631067961165, | |
| "loss": 0.6324, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.868234007654456, | |
| "grad_norm": 0.11976910382509232, | |
| "learning_rate": 0.0001262135922330097, | |
| "loss": 0.8507, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 1.8704209950792783, | |
| "grad_norm": 0.08364604413509369, | |
| "learning_rate": 0.00012597087378640775, | |
| "loss": 0.7253, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.8726079825041007, | |
| "grad_norm": 0.08944633603096008, | |
| "learning_rate": 0.00012572815533980582, | |
| "loss": 0.6782, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 1.874794969928923, | |
| "grad_norm": 0.14449813961982727, | |
| "learning_rate": 0.00012548543689320387, | |
| "loss": 0.8016, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.8769819573537452, | |
| "grad_norm": 0.07508597522974014, | |
| "learning_rate": 0.00012524271844660192, | |
| "loss": 0.7011, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 1.8791689447785676, | |
| "grad_norm": 0.07832931727170944, | |
| "learning_rate": 0.000125, | |
| "loss": 0.6775, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.8813559322033897, | |
| "grad_norm": 0.05429183319211006, | |
| "learning_rate": 0.00012475728155339805, | |
| "loss": 0.8538, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 1.883542919628212, | |
| "grad_norm": 0.05742572247982025, | |
| "learning_rate": 0.0001245145631067961, | |
| "loss": 0.5853, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.8857299070530344, | |
| "grad_norm": 0.07888288795948029, | |
| "learning_rate": 0.00012427184466019418, | |
| "loss": 0.6769, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 1.8879168944778568, | |
| "grad_norm": 0.1497015655040741, | |
| "learning_rate": 0.00012402912621359223, | |
| "loss": 0.769, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.8901038819026792, | |
| "grad_norm": 0.05974101275205612, | |
| "learning_rate": 0.00012378640776699028, | |
| "loss": 0.6417, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 1.8922908693275013, | |
| "grad_norm": 0.06933821737766266, | |
| "learning_rate": 0.00012354368932038833, | |
| "loss": 0.8024, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.8944778567523237, | |
| "grad_norm": 0.08834031224250793, | |
| "learning_rate": 0.0001233009708737864, | |
| "loss": 0.5945, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 1.8966648441771459, | |
| "grad_norm": 0.06061748042702675, | |
| "learning_rate": 0.00012305825242718446, | |
| "loss": 0.7633, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.8988518316019682, | |
| "grad_norm": 0.04227694123983383, | |
| "learning_rate": 0.0001228155339805825, | |
| "loss": 0.5573, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 1.9010388190267906, | |
| "grad_norm": 0.05616821348667145, | |
| "learning_rate": 0.00012257281553398056, | |
| "loss": 0.557, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.903225806451613, | |
| "grad_norm": 0.06088731437921524, | |
| "learning_rate": 0.0001223300970873786, | |
| "loss": 0.6185, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 1.9054127938764354, | |
| "grad_norm": 0.053946834057569504, | |
| "learning_rate": 0.0001220873786407767, | |
| "loss": 0.597, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.9075997813012575, | |
| "grad_norm": 0.10907828062772751, | |
| "learning_rate": 0.00012184466019417475, | |
| "loss": 0.6573, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 1.9097867687260797, | |
| "grad_norm": 0.18307583034038544, | |
| "learning_rate": 0.0001216019417475728, | |
| "loss": 0.6346, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.911973756150902, | |
| "grad_norm": 0.09590666741132736, | |
| "learning_rate": 0.00012135922330097087, | |
| "loss": 0.7205, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 1.9141607435757244, | |
| "grad_norm": 0.061872418969869614, | |
| "learning_rate": 0.00012111650485436892, | |
| "loss": 0.4657, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.9163477310005468, | |
| "grad_norm": 0.056165486574172974, | |
| "learning_rate": 0.00012087378640776697, | |
| "loss": 0.8182, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 1.9185347184253692, | |
| "grad_norm": 0.0575888454914093, | |
| "learning_rate": 0.00012063106796116505, | |
| "loss": 0.5488, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.9207217058501913, | |
| "grad_norm": 0.04158012568950653, | |
| "learning_rate": 0.0001203883495145631, | |
| "loss": 0.5699, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 1.9229086932750137, | |
| "grad_norm": 0.05806119740009308, | |
| "learning_rate": 0.00012014563106796115, | |
| "loss": 0.7537, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.9250956806998358, | |
| "grad_norm": 0.059674229472875595, | |
| "learning_rate": 0.00011990291262135923, | |
| "loss": 0.7854, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 1.9272826681246582, | |
| "grad_norm": 0.16574355959892273, | |
| "learning_rate": 0.00011966019417475728, | |
| "loss": 0.7689, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.9294696555494806, | |
| "grad_norm": 0.12689882516860962, | |
| "learning_rate": 0.00011941747572815533, | |
| "loss": 0.6747, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 1.931656642974303, | |
| "grad_norm": 0.0645318478345871, | |
| "learning_rate": 0.00011917475728155339, | |
| "loss": 0.6583, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.9338436303991253, | |
| "grad_norm": 0.0930570513010025, | |
| "learning_rate": 0.00011893203883495146, | |
| "loss": 0.5295, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 1.9360306178239475, | |
| "grad_norm": 0.16208307445049286, | |
| "learning_rate": 0.0001186893203883495, | |
| "loss": 0.8123, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.9382176052487698, | |
| "grad_norm": 0.11295409500598907, | |
| "learning_rate": 0.00011844660194174756, | |
| "loss": 0.8346, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 1.940404592673592, | |
| "grad_norm": 0.17686349153518677, | |
| "learning_rate": 0.00011820388349514562, | |
| "loss": 0.5105, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.9425915800984144, | |
| "grad_norm": 0.045305948704481125, | |
| "learning_rate": 0.00011796116504854367, | |
| "loss": 0.6025, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 1.9447785675232367, | |
| "grad_norm": 0.26349976658821106, | |
| "learning_rate": 0.00011771844660194173, | |
| "loss": 0.4674, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.9469655549480591, | |
| "grad_norm": 0.08130199462175369, | |
| "learning_rate": 0.0001174757281553398, | |
| "loss": 0.6241, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 1.9491525423728815, | |
| "grad_norm": 0.12196088582277298, | |
| "learning_rate": 0.00011723300970873785, | |
| "loss": 0.6839, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.9513395297977036, | |
| "grad_norm": 0.08335163444280624, | |
| "learning_rate": 0.0001169902912621359, | |
| "loss": 0.6898, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 1.953526517222526, | |
| "grad_norm": 0.06309807300567627, | |
| "learning_rate": 0.00011674757281553398, | |
| "loss": 0.5075, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.9557135046473482, | |
| "grad_norm": 0.09318676590919495, | |
| "learning_rate": 0.00011650485436893203, | |
| "loss": 0.6401, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 1.9579004920721705, | |
| "grad_norm": 0.0993434339761734, | |
| "learning_rate": 0.00011626213592233008, | |
| "loss": 0.5846, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.960087479496993, | |
| "grad_norm": 0.06654249131679535, | |
| "learning_rate": 0.00011601941747572814, | |
| "loss": 0.4916, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 1.9622744669218153, | |
| "grad_norm": 0.06536954641342163, | |
| "learning_rate": 0.0001157766990291262, | |
| "loss": 0.7887, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 1.9644614543466377, | |
| "grad_norm": 0.08677750080823898, | |
| "learning_rate": 0.00011553398058252426, | |
| "loss": 0.4943, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 1.9666484417714598, | |
| "grad_norm": 0.09694842249155045, | |
| "learning_rate": 0.00011529126213592232, | |
| "loss": 0.6401, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.968835429196282, | |
| "grad_norm": 0.045057326555252075, | |
| "learning_rate": 0.00011504854368932037, | |
| "loss": 0.7471, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 1.9710224166211043, | |
| "grad_norm": 0.13406231999397278, | |
| "learning_rate": 0.00011480582524271844, | |
| "loss": 0.6352, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 1.9732094040459267, | |
| "grad_norm": 0.07525835931301117, | |
| "learning_rate": 0.0001145631067961165, | |
| "loss": 0.6649, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 1.975396391470749, | |
| "grad_norm": 0.08038943260908127, | |
| "learning_rate": 0.00011432038834951455, | |
| "loss": 0.5342, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 1.9775833788955715, | |
| "grad_norm": 0.11779956519603729, | |
| "learning_rate": 0.0001140776699029126, | |
| "loss": 0.7336, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 1.9797703663203936, | |
| "grad_norm": 0.1321367770433426, | |
| "learning_rate": 0.00011383495145631068, | |
| "loss": 0.7451, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 1.981957353745216, | |
| "grad_norm": 0.12764184176921844, | |
| "learning_rate": 0.00011359223300970873, | |
| "loss": 0.7046, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 1.9841443411700381, | |
| "grad_norm": 0.04532116651535034, | |
| "learning_rate": 0.00011334951456310678, | |
| "loss": 0.7603, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 1.9863313285948605, | |
| "grad_norm": 0.043816618621349335, | |
| "learning_rate": 0.00011310679611650484, | |
| "loss": 0.8493, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 1.9885183160196829, | |
| "grad_norm": 0.07102162390947342, | |
| "learning_rate": 0.00011286407766990291, | |
| "loss": 0.6751, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.9907053034445052, | |
| "grad_norm": 0.07698041200637817, | |
| "learning_rate": 0.00011262135922330096, | |
| "loss": 0.8755, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 1.9928922908693276, | |
| "grad_norm": 0.07542385905981064, | |
| "learning_rate": 0.00011237864077669902, | |
| "loss": 0.6766, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 1.9950792782941498, | |
| "grad_norm": 0.08312849700450897, | |
| "learning_rate": 0.00011213592233009707, | |
| "loss": 0.665, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 1.9972662657189721, | |
| "grad_norm": 0.08149036020040512, | |
| "learning_rate": 0.00011189320388349514, | |
| "loss": 0.5601, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 1.9994532531437943, | |
| "grad_norm": 0.08313512802124023, | |
| "learning_rate": 0.0001116504854368932, | |
| "loss": 0.7797, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.2948603332042694, | |
| "learning_rate": 0.00011140776699029125, | |
| "loss": 0.7282, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.0021869874248224, | |
| "grad_norm": 0.05372385308146477, | |
| "learning_rate": 0.0001111650485436893, | |
| "loss": 0.5473, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.0043739748496447, | |
| "grad_norm": 0.09125916659832001, | |
| "learning_rate": 0.00011092233009708738, | |
| "loss": 0.5244, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.006560962274467, | |
| "grad_norm": 0.10436462610960007, | |
| "learning_rate": 0.00011067961165048543, | |
| "loss": 0.6836, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.008747949699289, | |
| "grad_norm": 0.0616326779127121, | |
| "learning_rate": 0.00011043689320388348, | |
| "loss": 0.5314, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.0109349371241114, | |
| "grad_norm": 0.0691252276301384, | |
| "learning_rate": 0.00011019417475728154, | |
| "loss": 0.4933, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.013121924548934, | |
| "grad_norm": 0.06891285628080368, | |
| "learning_rate": 0.00010995145631067961, | |
| "loss": 0.577, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.015308911973756, | |
| "grad_norm": 0.07588429003953934, | |
| "learning_rate": 0.00010970873786407766, | |
| "loss": 0.78, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.0174958993985785, | |
| "grad_norm": 0.05341513082385063, | |
| "learning_rate": 0.00010946601941747572, | |
| "loss": 0.6743, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.019682886823401, | |
| "grad_norm": 0.060330722481012344, | |
| "learning_rate": 0.00010922330097087377, | |
| "loss": 0.6878, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.0218698742482233, | |
| "grad_norm": 0.04241131246089935, | |
| "learning_rate": 0.00010898058252427184, | |
| "loss": 0.5132, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.024056861673045, | |
| "grad_norm": 0.06371448934078217, | |
| "learning_rate": 0.0001087378640776699, | |
| "loss": 0.3898, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.0262438490978676, | |
| "grad_norm": 0.08609224110841751, | |
| "learning_rate": 0.00010849514563106795, | |
| "loss": 0.6915, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.02843083652269, | |
| "grad_norm": 0.08693238347768784, | |
| "learning_rate": 0.000108252427184466, | |
| "loss": 0.7032, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.0306178239475123, | |
| "grad_norm": 0.10312940180301666, | |
| "learning_rate": 0.00010800970873786408, | |
| "loss": 0.7286, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.0328048113723347, | |
| "grad_norm": 0.03645063191652298, | |
| "learning_rate": 0.00010776699029126213, | |
| "loss": 0.5828, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.034991798797157, | |
| "grad_norm": 0.12635767459869385, | |
| "learning_rate": 0.00010752427184466018, | |
| "loss": 0.6497, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.037178786221979, | |
| "grad_norm": 0.04273942857980728, | |
| "learning_rate": 0.00010728155339805825, | |
| "loss": 0.9746, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.0393657736468014, | |
| "grad_norm": 0.07975783199071884, | |
| "learning_rate": 0.00010703883495145631, | |
| "loss": 0.7937, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.0415527610716238, | |
| "grad_norm": 0.05579470098018646, | |
| "learning_rate": 0.00010679611650485436, | |
| "loss": 0.3576, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.043739748496446, | |
| "grad_norm": 0.08263109624385834, | |
| "learning_rate": 0.00010655339805825242, | |
| "loss": 1.3163, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.0459267359212685, | |
| "grad_norm": 0.05702097713947296, | |
| "learning_rate": 0.00010631067961165047, | |
| "loss": 0.6933, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.048113723346091, | |
| "grad_norm": 0.08686594665050507, | |
| "learning_rate": 0.00010606796116504853, | |
| "loss": 0.7076, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 2.0503007107709132, | |
| "grad_norm": 0.06772071868181229, | |
| "learning_rate": 0.00010582524271844659, | |
| "loss": 0.8234, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 2.052487698195735, | |
| "grad_norm": 0.09771222621202469, | |
| "learning_rate": 0.00010558252427184465, | |
| "loss": 0.6862, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 2.0546746856205576, | |
| "grad_norm": 0.07454249262809753, | |
| "learning_rate": 0.0001053398058252427, | |
| "loss": 0.7456, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 2.05686167304538, | |
| "grad_norm": 0.15167352557182312, | |
| "learning_rate": 0.00010509708737864075, | |
| "loss": 0.6754, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 2.0590486604702023, | |
| "grad_norm": 0.06704775243997574, | |
| "learning_rate": 0.00010485436893203883, | |
| "loss": 0.6393, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 2.0612356478950247, | |
| "grad_norm": 0.08005604147911072, | |
| "learning_rate": 0.00010461165048543688, | |
| "loss": 0.6547, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 2.063422635319847, | |
| "grad_norm": 0.09053482115268707, | |
| "learning_rate": 0.00010436893203883493, | |
| "loss": 0.6647, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 2.0656096227446694, | |
| "grad_norm": 0.040814295411109924, | |
| "learning_rate": 0.00010412621359223301, | |
| "loss": 0.5523, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 2.0677966101694913, | |
| "grad_norm": 0.16581493616104126, | |
| "learning_rate": 0.00010388349514563106, | |
| "loss": 0.6994, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 2.0699835975943137, | |
| "grad_norm": 0.0843871682882309, | |
| "learning_rate": 0.00010364077669902911, | |
| "loss": 0.6573, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 2.072170585019136, | |
| "grad_norm": 0.32330793142318726, | |
| "learning_rate": 0.00010339805825242718, | |
| "loss": 0.5952, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 2.0743575724439585, | |
| "grad_norm": 0.06891205906867981, | |
| "learning_rate": 0.00010315533980582523, | |
| "loss": 0.5727, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.076544559868781, | |
| "grad_norm": 0.14751192927360535, | |
| "learning_rate": 0.00010291262135922329, | |
| "loss": 0.655, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 2.078731547293603, | |
| "grad_norm": 0.12010716646909714, | |
| "learning_rate": 0.00010266990291262135, | |
| "loss": 0.7208, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 2.0809185347184256, | |
| "grad_norm": 0.056473225355148315, | |
| "learning_rate": 0.0001024271844660194, | |
| "loss": 0.4184, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 2.0831055221432475, | |
| "grad_norm": 0.05701868236064911, | |
| "learning_rate": 0.00010218446601941746, | |
| "loss": 0.5916, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 2.08529250956807, | |
| "grad_norm": 0.1446901112794876, | |
| "learning_rate": 0.00010194174757281553, | |
| "loss": 0.8088, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 2.0874794969928923, | |
| "grad_norm": 0.12073302268981934, | |
| "learning_rate": 0.00010169902912621358, | |
| "loss": 0.6987, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 2.0896664844177146, | |
| "grad_norm": 0.10358598083257675, | |
| "learning_rate": 0.00010145631067961163, | |
| "loss": 0.5885, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 2.091853471842537, | |
| "grad_norm": 0.04258884862065315, | |
| "learning_rate": 0.00010121359223300971, | |
| "loss": 0.4822, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 2.0940404592673594, | |
| "grad_norm": 0.08908698707818985, | |
| "learning_rate": 0.00010097087378640776, | |
| "loss": 0.6025, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 2.0962274466921813, | |
| "grad_norm": 0.06096798926591873, | |
| "learning_rate": 0.00010072815533980581, | |
| "loss": 0.6388, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.0984144341170037, | |
| "grad_norm": 0.05718372389674187, | |
| "learning_rate": 0.00010048543689320388, | |
| "loss": 0.3815, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 2.100601421541826, | |
| "grad_norm": 0.08511919528245926, | |
| "learning_rate": 0.00010024271844660193, | |
| "loss": 0.6558, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 2.1027884089666484, | |
| "grad_norm": 0.08019436150789261, | |
| "learning_rate": 9.999999999999999e-05, | |
| "loss": 0.6916, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 2.104975396391471, | |
| "grad_norm": 0.06560107320547104, | |
| "learning_rate": 9.975728155339806e-05, | |
| "loss": 0.5873, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 2.107162383816293, | |
| "grad_norm": 0.14055019617080688, | |
| "learning_rate": 9.95145631067961e-05, | |
| "loss": 0.5676, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 2.1093493712411155, | |
| "grad_norm": 0.1176968365907669, | |
| "learning_rate": 9.927184466019416e-05, | |
| "loss": 0.5483, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 2.1115363586659375, | |
| "grad_norm": 0.07665383070707321, | |
| "learning_rate": 9.902912621359223e-05, | |
| "loss": 0.5784, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 2.11372334609076, | |
| "grad_norm": 0.1384451985359192, | |
| "learning_rate": 9.878640776699028e-05, | |
| "loss": 0.6778, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 2.115910333515582, | |
| "grad_norm": 0.09405054897069931, | |
| "learning_rate": 9.854368932038833e-05, | |
| "loss": 0.7837, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 2.1180973209404046, | |
| "grad_norm": 0.06697643548250198, | |
| "learning_rate": 9.83009708737864e-05, | |
| "loss": 0.5959, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.120284308365227, | |
| "grad_norm": 0.09054021537303925, | |
| "learning_rate": 9.805825242718446e-05, | |
| "loss": 0.8132, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 2.1224712957900493, | |
| "grad_norm": 0.1512996107339859, | |
| "learning_rate": 9.781553398058251e-05, | |
| "loss": 0.8451, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.1246582832148717, | |
| "grad_norm": 0.10180766880512238, | |
| "learning_rate": 9.757281553398058e-05, | |
| "loss": 0.949, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 2.1268452706396936, | |
| "grad_norm": 0.0628434494137764, | |
| "learning_rate": 9.733009708737863e-05, | |
| "loss": 0.6542, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.129032258064516, | |
| "grad_norm": 0.07519734650850296, | |
| "learning_rate": 9.708737864077669e-05, | |
| "loss": 0.7679, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 2.1312192454893384, | |
| "grad_norm": 0.07029229402542114, | |
| "learning_rate": 9.684466019417476e-05, | |
| "loss": 0.5905, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.1334062329141608, | |
| "grad_norm": 0.06242423877120018, | |
| "learning_rate": 9.660194174757281e-05, | |
| "loss": 0.4672, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 2.135593220338983, | |
| "grad_norm": 0.044176701456308365, | |
| "learning_rate": 9.635922330097086e-05, | |
| "loss": 0.6701, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.1377802077638055, | |
| "grad_norm": 0.09683780372142792, | |
| "learning_rate": 9.611650485436893e-05, | |
| "loss": 0.8281, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 2.1399671951886274, | |
| "grad_norm": 0.08462493866682053, | |
| "learning_rate": 9.587378640776699e-05, | |
| "loss": 0.569, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.14215418261345, | |
| "grad_norm": 0.10928475111722946, | |
| "learning_rate": 9.563106796116504e-05, | |
| "loss": 0.7517, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 2.144341170038272, | |
| "grad_norm": 0.045466918498277664, | |
| "learning_rate": 9.53883495145631e-05, | |
| "loss": 0.6727, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.1465281574630946, | |
| "grad_norm": 0.10792270302772522, | |
| "learning_rate": 9.514563106796116e-05, | |
| "loss": 0.6238, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 2.148715144887917, | |
| "grad_norm": 0.09913436323404312, | |
| "learning_rate": 9.490291262135921e-05, | |
| "loss": 0.6343, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.1509021323127393, | |
| "grad_norm": 0.06535235792398453, | |
| "learning_rate": 9.466019417475728e-05, | |
| "loss": 0.449, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 2.1530891197375617, | |
| "grad_norm": 0.09585700929164886, | |
| "learning_rate": 9.441747572815533e-05, | |
| "loss": 0.6492, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.1552761071623836, | |
| "grad_norm": 0.05615914240479469, | |
| "learning_rate": 9.41747572815534e-05, | |
| "loss": 0.7197, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 2.157463094587206, | |
| "grad_norm": 0.04286925494670868, | |
| "learning_rate": 9.393203883495146e-05, | |
| "loss": 0.7092, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.1596500820120283, | |
| "grad_norm": 0.19396215677261353, | |
| "learning_rate": 9.368932038834951e-05, | |
| "loss": 0.6476, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 2.1618370694368507, | |
| "grad_norm": 0.05644046515226364, | |
| "learning_rate": 9.344660194174756e-05, | |
| "loss": 0.5788, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.164024056861673, | |
| "grad_norm": 0.10071127116680145, | |
| "learning_rate": 9.320388349514561e-05, | |
| "loss": 0.7672, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 2.1662110442864955, | |
| "grad_norm": 0.2811974883079529, | |
| "learning_rate": 9.296116504854369e-05, | |
| "loss": 0.6406, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.168398031711318, | |
| "grad_norm": 0.050698939710855484, | |
| "learning_rate": 9.271844660194174e-05, | |
| "loss": 0.5947, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 2.1705850191361398, | |
| "grad_norm": 0.06642589718103409, | |
| "learning_rate": 9.247572815533979e-05, | |
| "loss": 0.6463, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.172772006560962, | |
| "grad_norm": 0.10926207154989243, | |
| "learning_rate": 9.223300970873787e-05, | |
| "loss": 0.6987, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 2.1749589939857845, | |
| "grad_norm": 0.0996134802699089, | |
| "learning_rate": 9.199029126213592e-05, | |
| "loss": 0.5775, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.177145981410607, | |
| "grad_norm": 0.06016650050878525, | |
| "learning_rate": 9.174757281553397e-05, | |
| "loss": 0.7562, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 2.1793329688354293, | |
| "grad_norm": 0.07599005103111267, | |
| "learning_rate": 9.150485436893203e-05, | |
| "loss": 0.5302, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.1815199562602516, | |
| "grad_norm": 0.13693709671497345, | |
| "learning_rate": 9.126213592233008e-05, | |
| "loss": 0.5294, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 2.183706943685074, | |
| "grad_norm": 0.15747599303722382, | |
| "learning_rate": 9.101941747572814e-05, | |
| "loss": 0.5905, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.185893931109896, | |
| "grad_norm": 0.0751093253493309, | |
| "learning_rate": 9.077669902912621e-05, | |
| "loss": 0.5914, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 2.1880809185347183, | |
| "grad_norm": 0.06828130036592484, | |
| "learning_rate": 9.053398058252426e-05, | |
| "loss": 0.5183, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.1902679059595407, | |
| "grad_norm": 0.0638083666563034, | |
| "learning_rate": 9.029126213592231e-05, | |
| "loss": 0.6065, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 2.192454893384363, | |
| "grad_norm": 0.10457223653793335, | |
| "learning_rate": 9.004854368932039e-05, | |
| "loss": 0.5831, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.1946418808091854, | |
| "grad_norm": 0.08131687343120575, | |
| "learning_rate": 8.980582524271844e-05, | |
| "loss": 0.6268, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 2.196828868234008, | |
| "grad_norm": 0.06330417096614838, | |
| "learning_rate": 8.956310679611649e-05, | |
| "loss": 0.5261, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.19901585565883, | |
| "grad_norm": 0.056995972990989685, | |
| "learning_rate": 8.932038834951457e-05, | |
| "loss": 0.5737, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 2.201202843083652, | |
| "grad_norm": 0.10710989683866501, | |
| "learning_rate": 8.907766990291262e-05, | |
| "loss": 0.5823, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.2033898305084745, | |
| "grad_norm": 0.08252164721488953, | |
| "learning_rate": 8.883495145631067e-05, | |
| "loss": 0.5026, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 2.205576817933297, | |
| "grad_norm": 0.09622621536254883, | |
| "learning_rate": 8.859223300970873e-05, | |
| "loss": 0.6359, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.2077638053581192, | |
| "grad_norm": 0.15953707695007324, | |
| "learning_rate": 8.834951456310678e-05, | |
| "loss": 0.8133, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 2.2099507927829416, | |
| "grad_norm": 0.02987244725227356, | |
| "learning_rate": 8.810679611650485e-05, | |
| "loss": 0.6058, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.212137780207764, | |
| "grad_norm": 0.0736922174692154, | |
| "learning_rate": 8.786407766990291e-05, | |
| "loss": 0.7735, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 2.2143247676325863, | |
| "grad_norm": 0.06956300139427185, | |
| "learning_rate": 8.762135922330096e-05, | |
| "loss": 0.8033, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.2165117550574083, | |
| "grad_norm": 0.06588080525398254, | |
| "learning_rate": 8.737864077669901e-05, | |
| "loss": 0.59, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 2.2186987424822306, | |
| "grad_norm": 0.0655379667878151, | |
| "learning_rate": 8.713592233009709e-05, | |
| "loss": 0.433, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.220885729907053, | |
| "grad_norm": 0.0930158868432045, | |
| "learning_rate": 8.689320388349514e-05, | |
| "loss": 0.7287, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 2.2230727173318754, | |
| "grad_norm": 0.10869035869836807, | |
| "learning_rate": 8.665048543689319e-05, | |
| "loss": 0.8585, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.2252597047566978, | |
| "grad_norm": 0.10342340171337128, | |
| "learning_rate": 8.640776699029127e-05, | |
| "loss": 0.8002, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 2.22744669218152, | |
| "grad_norm": 0.2504841685295105, | |
| "learning_rate": 8.616504854368932e-05, | |
| "loss": 0.5052, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.229633679606342, | |
| "grad_norm": 0.07130393385887146, | |
| "learning_rate": 8.592233009708737e-05, | |
| "loss": 0.6461, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 2.2318206670311644, | |
| "grad_norm": 0.06542620062828064, | |
| "learning_rate": 8.567961165048543e-05, | |
| "loss": 0.5922, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.234007654455987, | |
| "grad_norm": 0.05315176770091057, | |
| "learning_rate": 8.543689320388348e-05, | |
| "loss": 0.5041, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 2.236194641880809, | |
| "grad_norm": 0.06824400275945663, | |
| "learning_rate": 8.519417475728155e-05, | |
| "loss": 0.6448, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.2383816293056316, | |
| "grad_norm": 0.11105392873287201, | |
| "learning_rate": 8.495145631067961e-05, | |
| "loss": 0.6203, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 2.240568616730454, | |
| "grad_norm": 0.08662376552820206, | |
| "learning_rate": 8.470873786407766e-05, | |
| "loss": 0.7169, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.2427556041552763, | |
| "grad_norm": 0.07787991315126419, | |
| "learning_rate": 8.446601941747571e-05, | |
| "loss": 0.7499, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 2.2449425915800982, | |
| "grad_norm": 0.0659622848033905, | |
| "learning_rate": 8.422330097087379e-05, | |
| "loss": 1.0476, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.2471295790049206, | |
| "grad_norm": 0.08333081007003784, | |
| "learning_rate": 8.398058252427184e-05, | |
| "loss": 0.5645, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 2.249316566429743, | |
| "grad_norm": 0.0681498721241951, | |
| "learning_rate": 8.373786407766989e-05, | |
| "loss": 0.5698, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.2515035538545654, | |
| "grad_norm": 0.042218856513500214, | |
| "learning_rate": 8.349514563106795e-05, | |
| "loss": 0.7866, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 2.2536905412793877, | |
| "grad_norm": 0.1502753645181656, | |
| "learning_rate": 8.325242718446602e-05, | |
| "loss": 0.7288, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.25587752870421, | |
| "grad_norm": 0.04378807172179222, | |
| "learning_rate": 8.300970873786407e-05, | |
| "loss": 0.663, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 2.258064516129032, | |
| "grad_norm": 0.06042996048927307, | |
| "learning_rate": 8.276699029126213e-05, | |
| "loss": 0.7021, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.2602515035538544, | |
| "grad_norm": 0.13028943538665771, | |
| "learning_rate": 8.252427184466018e-05, | |
| "loss": 0.662, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 2.2624384909786768, | |
| "grad_norm": 0.10280822962522507, | |
| "learning_rate": 8.228155339805825e-05, | |
| "loss": 0.7972, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.264625478403499, | |
| "grad_norm": 0.08380752801895142, | |
| "learning_rate": 8.203883495145631e-05, | |
| "loss": 0.8213, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 2.2668124658283215, | |
| "grad_norm": 0.0534130297601223, | |
| "learning_rate": 8.179611650485436e-05, | |
| "loss": 0.6322, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.268999453253144, | |
| "grad_norm": 0.029188288375735283, | |
| "learning_rate": 8.155339805825241e-05, | |
| "loss": 1.1116, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 2.2711864406779663, | |
| "grad_norm": 0.07449432462453842, | |
| "learning_rate": 8.131067961165049e-05, | |
| "loss": 0.7614, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.273373428102788, | |
| "grad_norm": 0.13352523744106293, | |
| "learning_rate": 8.106796116504854e-05, | |
| "loss": 0.7191, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 2.2755604155276106, | |
| "grad_norm": 0.18024753034114838, | |
| "learning_rate": 8.082524271844659e-05, | |
| "loss": 0.6954, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.277747402952433, | |
| "grad_norm": 0.07135174423456192, | |
| "learning_rate": 8.058252427184464e-05, | |
| "loss": 0.7497, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 2.2799343903772553, | |
| "grad_norm": 0.04793510586023331, | |
| "learning_rate": 8.033980582524272e-05, | |
| "loss": 0.6165, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.2821213778020777, | |
| "grad_norm": 0.12190023809671402, | |
| "learning_rate": 8.009708737864077e-05, | |
| "loss": 0.8184, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 2.2843083652269, | |
| "grad_norm": 0.06986870616674423, | |
| "learning_rate": 7.985436893203882e-05, | |
| "loss": 0.7382, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.2864953526517224, | |
| "grad_norm": 0.044186268001794815, | |
| "learning_rate": 7.961165048543688e-05, | |
| "loss": 0.6763, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 2.2886823400765444, | |
| "grad_norm": 0.08646490424871445, | |
| "learning_rate": 7.936893203883495e-05, | |
| "loss": 0.8968, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.2908693275013667, | |
| "grad_norm": 0.14186227321624756, | |
| "learning_rate": 7.9126213592233e-05, | |
| "loss": 0.7973, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 2.293056314926189, | |
| "grad_norm": 0.2580704987049103, | |
| "learning_rate": 7.888349514563106e-05, | |
| "loss": 0.5353, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.2952433023510115, | |
| "grad_norm": 0.08344961702823639, | |
| "learning_rate": 7.864077669902911e-05, | |
| "loss": 0.4765, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 2.297430289775834, | |
| "grad_norm": 0.04605596140027046, | |
| "learning_rate": 7.839805825242716e-05, | |
| "loss": 0.5672, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.2996172772006562, | |
| "grad_norm": 0.08017672598361969, | |
| "learning_rate": 7.815533980582524e-05, | |
| "loss": 0.6236, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 2.3018042646254786, | |
| "grad_norm": 0.0792083665728569, | |
| "learning_rate": 7.791262135922329e-05, | |
| "loss": 0.6233, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 2.3039912520503005, | |
| "grad_norm": 3.3572826385498047, | |
| "learning_rate": 7.766990291262134e-05, | |
| "loss": 0.6117, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 2.306178239475123, | |
| "grad_norm": 0.0807461366057396, | |
| "learning_rate": 7.742718446601942e-05, | |
| "loss": 0.7919, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 2.3083652268999453, | |
| "grad_norm": 0.0449124239385128, | |
| "learning_rate": 7.718446601941747e-05, | |
| "loss": 0.4221, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 2.3105522143247677, | |
| "grad_norm": 0.03787796199321747, | |
| "learning_rate": 7.694174757281552e-05, | |
| "loss": 0.582, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 2.31273920174959, | |
| "grad_norm": 0.04931551218032837, | |
| "learning_rate": 7.669902912621359e-05, | |
| "loss": 0.569, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 2.3149261891744124, | |
| "grad_norm": 0.08203588426113129, | |
| "learning_rate": 7.645631067961165e-05, | |
| "loss": 0.5703, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.3171131765992348, | |
| "grad_norm": 0.07741877436637878, | |
| "learning_rate": 7.62135922330097e-05, | |
| "loss": 0.5266, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 2.3193001640240567, | |
| "grad_norm": 0.07360921800136566, | |
| "learning_rate": 7.597087378640776e-05, | |
| "loss": 0.5674, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 2.321487151448879, | |
| "grad_norm": 0.0719866156578064, | |
| "learning_rate": 7.572815533980581e-05, | |
| "loss": 0.8207, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 2.3236741388737014, | |
| "grad_norm": 0.0980333462357521, | |
| "learning_rate": 7.548543689320387e-05, | |
| "loss": 0.6523, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 2.325861126298524, | |
| "grad_norm": 0.06781519204378128, | |
| "learning_rate": 7.524271844660194e-05, | |
| "loss": 0.5863, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 2.328048113723346, | |
| "grad_norm": 0.14128249883651733, | |
| "learning_rate": 7.5e-05, | |
| "loss": 0.7345, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 2.3302351011481686, | |
| "grad_norm": 0.0773346945643425, | |
| "learning_rate": 7.475728155339806e-05, | |
| "loss": 0.5622, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 2.332422088572991, | |
| "grad_norm": 0.06902678310871124, | |
| "learning_rate": 7.451456310679611e-05, | |
| "loss": 0.432, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 2.334609075997813, | |
| "grad_norm": 0.05239726975560188, | |
| "learning_rate": 7.427184466019417e-05, | |
| "loss": 0.5932, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 2.3367960634226352, | |
| "grad_norm": 0.0757569819688797, | |
| "learning_rate": 7.402912621359224e-05, | |
| "loss": 0.7471, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.3389830508474576, | |
| "grad_norm": 0.07388169318437576, | |
| "learning_rate": 7.378640776699029e-05, | |
| "loss": 0.8203, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 2.34117003827228, | |
| "grad_norm": 0.06949525326490402, | |
| "learning_rate": 7.354368932038834e-05, | |
| "loss": 0.7012, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 2.3433570256971024, | |
| "grad_norm": 0.19210799038410187, | |
| "learning_rate": 7.33009708737864e-05, | |
| "loss": 0.8093, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 2.3455440131219247, | |
| "grad_norm": 0.06348125636577606, | |
| "learning_rate": 7.305825242718445e-05, | |
| "loss": 0.6288, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 2.347731000546747, | |
| "grad_norm": 0.10887488722801208, | |
| "learning_rate": 7.281553398058252e-05, | |
| "loss": 0.787, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 2.349917987971569, | |
| "grad_norm": 0.02196069434285164, | |
| "learning_rate": 7.257281553398058e-05, | |
| "loss": 0.4349, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 2.3521049753963914, | |
| "grad_norm": 0.09422359615564346, | |
| "learning_rate": 7.233009708737863e-05, | |
| "loss": 0.6258, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 2.354291962821214, | |
| "grad_norm": 0.07091114670038223, | |
| "learning_rate": 7.20873786407767e-05, | |
| "loss": 0.629, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 2.356478950246036, | |
| "grad_norm": 0.07532557845115662, | |
| "learning_rate": 7.184466019417474e-05, | |
| "loss": 0.5756, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 2.3586659376708585, | |
| "grad_norm": 0.08222904056310654, | |
| "learning_rate": 7.160194174757281e-05, | |
| "loss": 1.0221, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.3608529250956805, | |
| "grad_norm": 0.10531949996948242, | |
| "learning_rate": 7.135922330097087e-05, | |
| "loss": 0.7223, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 2.363039912520503, | |
| "grad_norm": 0.16746295988559723, | |
| "learning_rate": 7.111650485436892e-05, | |
| "loss": 0.628, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 2.365226899945325, | |
| "grad_norm": 0.030869996175169945, | |
| "learning_rate": 7.087378640776699e-05, | |
| "loss": 0.5624, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 2.3674138873701476, | |
| "grad_norm": 0.0833895355463028, | |
| "learning_rate": 7.063106796116504e-05, | |
| "loss": 0.7341, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 2.36960087479497, | |
| "grad_norm": 0.05863624066114426, | |
| "learning_rate": 7.03883495145631e-05, | |
| "loss": 0.5487, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 2.3717878622197923, | |
| "grad_norm": 0.0941510945558548, | |
| "learning_rate": 7.014563106796115e-05, | |
| "loss": 0.7639, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 2.3739748496446147, | |
| "grad_norm": 0.08358798176050186, | |
| "learning_rate": 6.990291262135922e-05, | |
| "loss": 0.7019, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 2.3761618370694366, | |
| "grad_norm": 0.1289578527212143, | |
| "learning_rate": 6.966019417475727e-05, | |
| "loss": 0.6672, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 2.378348824494259, | |
| "grad_norm": 0.03130876645445824, | |
| "learning_rate": 6.941747572815533e-05, | |
| "loss": 0.9502, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 2.3805358119190814, | |
| "grad_norm": 0.05892246961593628, | |
| "learning_rate": 6.91747572815534e-05, | |
| "loss": 0.4054, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 2.3827227993439037, | |
| "grad_norm": 0.08211269229650497, | |
| "learning_rate": 6.893203883495145e-05, | |
| "loss": 0.6134, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 2.384909786768726, | |
| "grad_norm": 0.14080072939395905, | |
| "learning_rate": 6.868932038834951e-05, | |
| "loss": 0.8127, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 2.3870967741935485, | |
| "grad_norm": 0.08313527703285217, | |
| "learning_rate": 6.844660194174757e-05, | |
| "loss": 0.6494, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 2.389283761618371, | |
| "grad_norm": 0.12474166601896286, | |
| "learning_rate": 6.820388349514562e-05, | |
| "loss": 0.5539, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 2.391470749043193, | |
| "grad_norm": 0.08614606410264969, | |
| "learning_rate": 6.796116504854369e-05, | |
| "loss": 0.6714, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 2.393657736468015, | |
| "grad_norm": 0.07203420996665955, | |
| "learning_rate": 6.771844660194174e-05, | |
| "loss": 0.4596, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 2.3958447238928375, | |
| "grad_norm": 0.08504094183444977, | |
| "learning_rate": 6.74757281553398e-05, | |
| "loss": 0.5993, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 2.39803171131766, | |
| "grad_norm": 0.1589978188276291, | |
| "learning_rate": 6.723300970873785e-05, | |
| "loss": 0.6799, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 2.4002186987424823, | |
| "grad_norm": 0.0418832041323185, | |
| "learning_rate": 6.699029126213592e-05, | |
| "loss": 0.6429, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 2.4024056861673047, | |
| "grad_norm": 0.06284458935260773, | |
| "learning_rate": 6.674757281553397e-05, | |
| "loss": 0.6468, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 2.404592673592127, | |
| "grad_norm": 0.10806804895401001, | |
| "learning_rate": 6.650485436893203e-05, | |
| "loss": 0.9486, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 2.406779661016949, | |
| "grad_norm": 0.08519349247217178, | |
| "learning_rate": 6.62621359223301e-05, | |
| "loss": 0.4344, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 2.4089666484417713, | |
| "grad_norm": 0.05331065133213997, | |
| "learning_rate": 6.601941747572815e-05, | |
| "loss": 0.7634, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 2.4111536358665937, | |
| "grad_norm": 0.049161165952682495, | |
| "learning_rate": 6.577669902912621e-05, | |
| "loss": 0.5412, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 2.413340623291416, | |
| "grad_norm": 0.04863851144909859, | |
| "learning_rate": 6.553398058252426e-05, | |
| "loss": 0.5051, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 2.4155276107162384, | |
| "grad_norm": 0.05954289436340332, | |
| "learning_rate": 6.529126213592233e-05, | |
| "loss": 0.8638, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 2.417714598141061, | |
| "grad_norm": 0.12451896071434021, | |
| "learning_rate": 6.504854368932039e-05, | |
| "loss": 0.6294, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 2.419901585565883, | |
| "grad_norm": 0.07101567834615707, | |
| "learning_rate": 6.480582524271844e-05, | |
| "loss": 0.6948, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 2.422088572990705, | |
| "grad_norm": 0.0766870379447937, | |
| "learning_rate": 6.45631067961165e-05, | |
| "loss": 0.7316, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 2.4242755604155275, | |
| "grad_norm": 0.04993129521608353, | |
| "learning_rate": 6.432038834951455e-05, | |
| "loss": 0.5916, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 2.42646254784035, | |
| "grad_norm": 0.10340382903814316, | |
| "learning_rate": 6.407766990291262e-05, | |
| "loss": 0.4282, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 2.4286495352651722, | |
| "grad_norm": 0.10339093208312988, | |
| "learning_rate": 6.383495145631067e-05, | |
| "loss": 0.6349, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 2.4308365226899946, | |
| "grad_norm": 0.13234268128871918, | |
| "learning_rate": 6.359223300970873e-05, | |
| "loss": 0.8036, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 2.433023510114817, | |
| "grad_norm": 0.07981318235397339, | |
| "learning_rate": 6.334951456310678e-05, | |
| "loss": 0.6513, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 2.4352104975396394, | |
| "grad_norm": 0.13569766283035278, | |
| "learning_rate": 6.310679611650485e-05, | |
| "loss": 0.8387, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 2.4373974849644613, | |
| "grad_norm": 0.09119431674480438, | |
| "learning_rate": 6.286407766990291e-05, | |
| "loss": 0.585, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 2.4395844723892837, | |
| "grad_norm": 0.1081055998802185, | |
| "learning_rate": 6.262135922330096e-05, | |
| "loss": 0.6452, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 2.441771459814106, | |
| "grad_norm": 0.11525560915470123, | |
| "learning_rate": 6.237864077669903e-05, | |
| "loss": 0.7167, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 2.4439584472389284, | |
| "grad_norm": 0.06122585013508797, | |
| "learning_rate": 6.213592233009709e-05, | |
| "loss": 0.5866, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 2.446145434663751, | |
| "grad_norm": 0.09825724363327026, | |
| "learning_rate": 6.189320388349514e-05, | |
| "loss": 0.5137, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 2.448332422088573, | |
| "grad_norm": 0.02984674647450447, | |
| "learning_rate": 6.16504854368932e-05, | |
| "loss": 0.4119, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 2.4505194095133955, | |
| "grad_norm": 0.06197787821292877, | |
| "learning_rate": 6.140776699029126e-05, | |
| "loss": 0.6159, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 2.4527063969382175, | |
| "grad_norm": 0.04322591796517372, | |
| "learning_rate": 6.11650485436893e-05, | |
| "loss": 0.7058, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 2.45489338436304, | |
| "grad_norm": 0.03457969054579735, | |
| "learning_rate": 6.092233009708738e-05, | |
| "loss": 0.4582, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 2.457080371787862, | |
| "grad_norm": 0.07612497359514236, | |
| "learning_rate": 6.0679611650485434e-05, | |
| "loss": 0.5578, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 2.4592673592126846, | |
| "grad_norm": 0.05250145122408867, | |
| "learning_rate": 6.0436893203883485e-05, | |
| "loss": 0.591, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 2.461454346637507, | |
| "grad_norm": 0.11965347081422806, | |
| "learning_rate": 6.019417475728155e-05, | |
| "loss": 0.7173, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 2.4636413340623293, | |
| "grad_norm": 0.057462405413389206, | |
| "learning_rate": 5.995145631067961e-05, | |
| "loss": 0.5775, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 2.4658283214871517, | |
| "grad_norm": 0.0856429934501648, | |
| "learning_rate": 5.9708737864077663e-05, | |
| "loss": 0.5766, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 2.4680153089119736, | |
| "grad_norm": 0.09746617823839188, | |
| "learning_rate": 5.946601941747573e-05, | |
| "loss": 0.4959, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 2.470202296336796, | |
| "grad_norm": 0.07750790566205978, | |
| "learning_rate": 5.922330097087378e-05, | |
| "loss": 0.6716, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 2.4723892837616184, | |
| "grad_norm": 0.10556381940841675, | |
| "learning_rate": 5.8980582524271835e-05, | |
| "loss": 0.8194, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 2.4745762711864407, | |
| "grad_norm": 0.047729332000017166, | |
| "learning_rate": 5.87378640776699e-05, | |
| "loss": 0.6458, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 2.476763258611263, | |
| "grad_norm": 0.09303979575634003, | |
| "learning_rate": 5.849514563106795e-05, | |
| "loss": 0.6032, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 2.478950246036085, | |
| "grad_norm": 0.06340254843235016, | |
| "learning_rate": 5.8252427184466014e-05, | |
| "loss": 0.5289, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 2.4811372334609074, | |
| "grad_norm": 0.03329901024699211, | |
| "learning_rate": 5.800970873786407e-05, | |
| "loss": 0.896, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 2.48332422088573, | |
| "grad_norm": 0.1342897117137909, | |
| "learning_rate": 5.776699029126213e-05, | |
| "loss": 0.5008, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 2.485511208310552, | |
| "grad_norm": 0.03303806483745575, | |
| "learning_rate": 5.7524271844660186e-05, | |
| "loss": 0.4037, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 2.4876981957353745, | |
| "grad_norm": 0.0739361122250557, | |
| "learning_rate": 5.728155339805825e-05, | |
| "loss": 0.7167, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 2.489885183160197, | |
| "grad_norm": 0.06320933997631073, | |
| "learning_rate": 5.70388349514563e-05, | |
| "loss": 0.919, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 2.4920721705850193, | |
| "grad_norm": 0.0679958239197731, | |
| "learning_rate": 5.6796116504854364e-05, | |
| "loss": 0.731, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 2.494259158009841, | |
| "grad_norm": 0.12814053893089294, | |
| "learning_rate": 5.655339805825242e-05, | |
| "loss": 0.8651, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 2.4964461454346636, | |
| "grad_norm": 0.07267304509878159, | |
| "learning_rate": 5.631067961165048e-05, | |
| "loss": 0.4623, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 2.498633132859486, | |
| "grad_norm": 0.06321340799331665, | |
| "learning_rate": 5.6067961165048536e-05, | |
| "loss": 0.5501, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 2.5008201202843083, | |
| "grad_norm": 0.10648145526647568, | |
| "learning_rate": 5.58252427184466e-05, | |
| "loss": 0.5048, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 2.5030071077091307, | |
| "grad_norm": 0.10429555922746658, | |
| "learning_rate": 5.558252427184465e-05, | |
| "loss": 0.701, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 2.505194095133953, | |
| "grad_norm": 0.06531582027673721, | |
| "learning_rate": 5.5339805825242715e-05, | |
| "loss": 0.446, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 2.5073810825587755, | |
| "grad_norm": 0.04627282917499542, | |
| "learning_rate": 5.509708737864077e-05, | |
| "loss": 0.5879, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 2.5095680699835974, | |
| "grad_norm": 0.12454242259263992, | |
| "learning_rate": 5.485436893203883e-05, | |
| "loss": 0.5276, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 2.5117550574084198, | |
| "grad_norm": 0.09565190225839615, | |
| "learning_rate": 5.461165048543689e-05, | |
| "loss": 0.6706, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 2.513942044833242, | |
| "grad_norm": 0.09251926094293594, | |
| "learning_rate": 5.436893203883495e-05, | |
| "loss": 0.6097, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 2.5161290322580645, | |
| "grad_norm": 0.08162148296833038, | |
| "learning_rate": 5.4126213592233e-05, | |
| "loss": 0.5359, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 2.518316019682887, | |
| "grad_norm": 0.041675105690956116, | |
| "learning_rate": 5.3883495145631065e-05, | |
| "loss": 0.8033, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 2.5205030071077092, | |
| "grad_norm": 0.039802707731723785, | |
| "learning_rate": 5.364077669902912e-05, | |
| "loss": 0.6052, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 2.5226899945325316, | |
| "grad_norm": 0.08948308974504471, | |
| "learning_rate": 5.339805825242718e-05, | |
| "loss": 0.6798, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 2.5248769819573536, | |
| "grad_norm": 0.10361290723085403, | |
| "learning_rate": 5.315533980582524e-05, | |
| "loss": 0.5986, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 2.527063969382176, | |
| "grad_norm": 0.1291511505842209, | |
| "learning_rate": 5.2912621359223295e-05, | |
| "loss": 0.6655, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 2.5292509568069983, | |
| "grad_norm": 0.15041688084602356, | |
| "learning_rate": 5.266990291262135e-05, | |
| "loss": 0.7115, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 2.5314379442318207, | |
| "grad_norm": 0.08857720345258713, | |
| "learning_rate": 5.2427184466019416e-05, | |
| "loss": 0.7579, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 2.533624931656643, | |
| "grad_norm": 0.08004803210496902, | |
| "learning_rate": 5.2184466019417466e-05, | |
| "loss": 0.6388, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 2.5358119190814654, | |
| "grad_norm": 0.06408461928367615, | |
| "learning_rate": 5.194174757281553e-05, | |
| "loss": 0.6373, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 2.537998906506288, | |
| "grad_norm": 0.08298526704311371, | |
| "learning_rate": 5.169902912621359e-05, | |
| "loss": 0.7151, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 2.5401858939311097, | |
| "grad_norm": 0.09231656789779663, | |
| "learning_rate": 5.1456310679611645e-05, | |
| "loss": 0.6188, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 2.542372881355932, | |
| "grad_norm": 0.11329259723424911, | |
| "learning_rate": 5.12135922330097e-05, | |
| "loss": 0.6681, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 2.5445598687807545, | |
| "grad_norm": 0.17237365245819092, | |
| "learning_rate": 5.0970873786407766e-05, | |
| "loss": 0.8227, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 2.546746856205577, | |
| "grad_norm": 0.07260779291391373, | |
| "learning_rate": 5.072815533980582e-05, | |
| "loss": 0.6832, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 2.548933843630399, | |
| "grad_norm": 0.0579003281891346, | |
| "learning_rate": 5.048543689320388e-05, | |
| "loss": 0.5133, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 2.5511208310552216, | |
| "grad_norm": 0.05579472705721855, | |
| "learning_rate": 5.024271844660194e-05, | |
| "loss": 0.5569, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 2.553307818480044, | |
| "grad_norm": 0.08632995188236237, | |
| "learning_rate": 4.9999999999999996e-05, | |
| "loss": 0.8297, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 2.555494805904866, | |
| "grad_norm": 0.05742983520030975, | |
| "learning_rate": 4.975728155339805e-05, | |
| "loss": 0.5625, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 2.5576817933296883, | |
| "grad_norm": 0.09453807771205902, | |
| "learning_rate": 4.951456310679612e-05, | |
| "loss": 0.5199, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 2.5598687807545106, | |
| "grad_norm": 0.07085605710744858, | |
| "learning_rate": 4.927184466019417e-05, | |
| "loss": 0.6006, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 2.562055768179333, | |
| "grad_norm": 0.1340280920267105, | |
| "learning_rate": 4.902912621359223e-05, | |
| "loss": 0.6783, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 2.5642427556041554, | |
| "grad_norm": 0.08021483570337296, | |
| "learning_rate": 4.878640776699029e-05, | |
| "loss": 0.4783, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 2.5664297430289773, | |
| "grad_norm": 0.04842549189925194, | |
| "learning_rate": 4.8543689320388346e-05, | |
| "loss": 0.6662, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 2.5686167304538, | |
| "grad_norm": 0.1178601086139679, | |
| "learning_rate": 4.8300970873786403e-05, | |
| "loss": 0.7801, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 2.570803717878622, | |
| "grad_norm": 0.06883268803358078, | |
| "learning_rate": 4.805825242718447e-05, | |
| "loss": 0.5078, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 2.5729907053034444, | |
| "grad_norm": 0.05313848704099655, | |
| "learning_rate": 4.781553398058252e-05, | |
| "loss": 0.7318, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 2.575177692728267, | |
| "grad_norm": 0.05223860591650009, | |
| "learning_rate": 4.757281553398058e-05, | |
| "loss": 0.5506, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 2.577364680153089, | |
| "grad_norm": 0.06799974292516708, | |
| "learning_rate": 4.733009708737864e-05, | |
| "loss": 0.7171, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 2.5795516675779115, | |
| "grad_norm": 0.05223657935857773, | |
| "learning_rate": 4.70873786407767e-05, | |
| "loss": 0.6697, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 2.5817386550027335, | |
| "grad_norm": 0.12002090364694595, | |
| "learning_rate": 4.6844660194174754e-05, | |
| "loss": 0.5765, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 2.5839256424275563, | |
| "grad_norm": 0.112289659678936, | |
| "learning_rate": 4.6601941747572804e-05, | |
| "loss": 0.6459, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 2.586112629852378, | |
| "grad_norm": 0.06973439455032349, | |
| "learning_rate": 4.635922330097087e-05, | |
| "loss": 0.6429, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 2.5882996172772006, | |
| "grad_norm": 0.08749748021364212, | |
| "learning_rate": 4.611650485436893e-05, | |
| "loss": 0.6056, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 2.590486604702023, | |
| "grad_norm": 0.07339344173669815, | |
| "learning_rate": 4.587378640776698e-05, | |
| "loss": 0.6424, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 2.5926735921268453, | |
| "grad_norm": 0.06114998459815979, | |
| "learning_rate": 4.563106796116504e-05, | |
| "loss": 0.5619, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 2.5948605795516677, | |
| "grad_norm": 0.10229608416557312, | |
| "learning_rate": 4.5388349514563104e-05, | |
| "loss": 0.486, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 2.5970475669764896, | |
| "grad_norm": 0.10732418298721313, | |
| "learning_rate": 4.5145631067961155e-05, | |
| "loss": 0.646, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 2.5992345544013125, | |
| "grad_norm": 0.09944164752960205, | |
| "learning_rate": 4.490291262135922e-05, | |
| "loss": 0.6848, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 2.6014215418261344, | |
| "grad_norm": 0.05396636202931404, | |
| "learning_rate": 4.466019417475728e-05, | |
| "loss": 0.6142, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 2.6036085292509568, | |
| "grad_norm": 0.12629559636116028, | |
| "learning_rate": 4.4417475728155334e-05, | |
| "loss": 0.4058, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 2.605795516675779, | |
| "grad_norm": 0.10848679393529892, | |
| "learning_rate": 4.417475728155339e-05, | |
| "loss": 0.7677, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 2.6079825041006015, | |
| "grad_norm": 0.06883646547794342, | |
| "learning_rate": 4.3932038834951455e-05, | |
| "loss": 0.6009, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 2.610169491525424, | |
| "grad_norm": 0.0868680477142334, | |
| "learning_rate": 4.3689320388349505e-05, | |
| "loss": 0.684, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 2.612356478950246, | |
| "grad_norm": 0.028191180899739265, | |
| "learning_rate": 4.344660194174757e-05, | |
| "loss": 0.5871, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 2.614543466375068, | |
| "grad_norm": 0.07453961670398712, | |
| "learning_rate": 4.3203883495145634e-05, | |
| "loss": 0.5428, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 2.6167304537998906, | |
| "grad_norm": 0.08585245162248611, | |
| "learning_rate": 4.2961165048543684e-05, | |
| "loss": 0.6162, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 2.618917441224713, | |
| "grad_norm": 0.052546802908182144, | |
| "learning_rate": 4.271844660194174e-05, | |
| "loss": 0.5801, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 2.6211044286495353, | |
| "grad_norm": 0.131413534283638, | |
| "learning_rate": 4.2475728155339805e-05, | |
| "loss": 0.9167, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 2.6232914160743577, | |
| "grad_norm": 0.22036047279834747, | |
| "learning_rate": 4.2233009708737856e-05, | |
| "loss": 0.5283, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 2.62547840349918, | |
| "grad_norm": 0.3202528953552246, | |
| "learning_rate": 4.199029126213592e-05, | |
| "loss": 0.7045, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 2.627665390924002, | |
| "grad_norm": 0.04368839040398598, | |
| "learning_rate": 4.174757281553398e-05, | |
| "loss": 0.5529, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 2.6298523783488243, | |
| "grad_norm": 0.04791286960244179, | |
| "learning_rate": 4.1504854368932035e-05, | |
| "loss": 0.5689, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 2.6320393657736467, | |
| "grad_norm": 0.04749435931444168, | |
| "learning_rate": 4.126213592233009e-05, | |
| "loss": 0.4968, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 2.634226353198469, | |
| "grad_norm": 0.06943855434656143, | |
| "learning_rate": 4.1019417475728156e-05, | |
| "loss": 0.6496, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 2.6364133406232915, | |
| "grad_norm": 0.059361230581998825, | |
| "learning_rate": 4.0776699029126206e-05, | |
| "loss": 0.601, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 2.638600328048114, | |
| "grad_norm": 0.11798574030399323, | |
| "learning_rate": 4.053398058252427e-05, | |
| "loss": 0.6548, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 2.640787315472936, | |
| "grad_norm": 0.05566198378801346, | |
| "learning_rate": 4.029126213592232e-05, | |
| "loss": 0.4842, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 2.642974302897758, | |
| "grad_norm": 0.07326702028512955, | |
| "learning_rate": 4.0048543689320385e-05, | |
| "loss": 0.5298, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 2.6451612903225805, | |
| "grad_norm": 0.05375058576464653, | |
| "learning_rate": 3.980582524271844e-05, | |
| "loss": 0.6232, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 2.647348277747403, | |
| "grad_norm": 0.05424814671278, | |
| "learning_rate": 3.95631067961165e-05, | |
| "loss": 0.5531, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 2.6495352651722253, | |
| "grad_norm": 0.04809017851948738, | |
| "learning_rate": 3.932038834951456e-05, | |
| "loss": 0.615, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 2.6517222525970476, | |
| "grad_norm": 0.07291999459266663, | |
| "learning_rate": 3.907766990291262e-05, | |
| "loss": 0.5235, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 2.65390924002187, | |
| "grad_norm": 0.11024939268827438, | |
| "learning_rate": 3.883495145631067e-05, | |
| "loss": 0.7097, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 2.6560962274466924, | |
| "grad_norm": 0.11854353547096252, | |
| "learning_rate": 3.8592233009708736e-05, | |
| "loss": 0.7801, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 2.6582832148715143, | |
| "grad_norm": 0.07645071297883987, | |
| "learning_rate": 3.834951456310679e-05, | |
| "loss": 0.632, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 2.6604702022963367, | |
| "grad_norm": 0.055733490735292435, | |
| "learning_rate": 3.810679611650485e-05, | |
| "loss": 0.7028, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 2.662657189721159, | |
| "grad_norm": 0.08349243551492691, | |
| "learning_rate": 3.786407766990291e-05, | |
| "loss": 0.6578, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 2.6648441771459814, | |
| "grad_norm": 0.10217994451522827, | |
| "learning_rate": 3.762135922330097e-05, | |
| "loss": 0.6112, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 2.667031164570804, | |
| "grad_norm": 0.09897356480360031, | |
| "learning_rate": 3.737864077669903e-05, | |
| "loss": 0.8556, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 2.669218151995626, | |
| "grad_norm": 0.06858976930379868, | |
| "learning_rate": 3.7135922330097086e-05, | |
| "loss": 0.4973, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 2.6714051394204485, | |
| "grad_norm": 0.06404196470975876, | |
| "learning_rate": 3.689320388349514e-05, | |
| "loss": 0.6602, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 2.6735921268452705, | |
| "grad_norm": 0.07733612507581711, | |
| "learning_rate": 3.66504854368932e-05, | |
| "loss": 0.4758, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 2.675779114270093, | |
| "grad_norm": 0.09863855689764023, | |
| "learning_rate": 3.640776699029126e-05, | |
| "loss": 0.6417, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 2.6779661016949152, | |
| "grad_norm": 0.08200982958078384, | |
| "learning_rate": 3.6165048543689315e-05, | |
| "loss": 0.6043, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 2.6801530891197376, | |
| "grad_norm": 0.07447304576635361, | |
| "learning_rate": 3.592233009708737e-05, | |
| "loss": 0.5741, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 2.68234007654456, | |
| "grad_norm": 0.05752440541982651, | |
| "learning_rate": 3.5679611650485437e-05, | |
| "loss": 1.4945, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 2.684527063969382, | |
| "grad_norm": 0.13874761760234833, | |
| "learning_rate": 3.5436893203883494e-05, | |
| "loss": 0.6187, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 2.6867140513942047, | |
| "grad_norm": 0.04540708661079407, | |
| "learning_rate": 3.519417475728155e-05, | |
| "loss": 0.4935, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 2.6889010388190266, | |
| "grad_norm": 0.04991524666547775, | |
| "learning_rate": 3.495145631067961e-05, | |
| "loss": 0.6385, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 2.691088026243849, | |
| "grad_norm": 0.06925421953201294, | |
| "learning_rate": 3.4708737864077666e-05, | |
| "loss": 0.6022, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 2.6932750136686714, | |
| "grad_norm": 0.1184193342924118, | |
| "learning_rate": 3.446601941747572e-05, | |
| "loss": 0.603, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 2.6954620010934938, | |
| "grad_norm": 0.054968900978565216, | |
| "learning_rate": 3.422330097087379e-05, | |
| "loss": 0.7349, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 2.697648988518316, | |
| "grad_norm": 0.053675152361392975, | |
| "learning_rate": 3.3980582524271844e-05, | |
| "loss": 0.6316, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 2.699835975943138, | |
| "grad_norm": 0.08283350616693497, | |
| "learning_rate": 3.37378640776699e-05, | |
| "loss": 0.6284, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 2.702022963367961, | |
| "grad_norm": 0.06438212096691132, | |
| "learning_rate": 3.349514563106796e-05, | |
| "loss": 0.6579, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 2.704209950792783, | |
| "grad_norm": 0.08433155715465546, | |
| "learning_rate": 3.3252427184466016e-05, | |
| "loss": 0.5701, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 2.706396938217605, | |
| "grad_norm": 0.059264179319143295, | |
| "learning_rate": 3.3009708737864073e-05, | |
| "loss": 0.7575, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 2.7085839256424276, | |
| "grad_norm": 0.0719296857714653, | |
| "learning_rate": 3.276699029126213e-05, | |
| "loss": 0.8028, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 2.71077091306725, | |
| "grad_norm": 0.12577201426029205, | |
| "learning_rate": 3.2524271844660195e-05, | |
| "loss": 0.6245, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 2.7129579004920723, | |
| "grad_norm": 0.06721910089254379, | |
| "learning_rate": 3.228155339805825e-05, | |
| "loss": 0.6522, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 2.7151448879168942, | |
| "grad_norm": 0.06833085417747498, | |
| "learning_rate": 3.203883495145631e-05, | |
| "loss": 0.5381, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 2.717331875341717, | |
| "grad_norm": 0.08729522675275803, | |
| "learning_rate": 3.179611650485437e-05, | |
| "loss": 0.7297, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 2.719518862766539, | |
| "grad_norm": 0.08239904046058655, | |
| "learning_rate": 3.1553398058252424e-05, | |
| "loss": 0.5976, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 2.7217058501913614, | |
| "grad_norm": 0.04949947074055672, | |
| "learning_rate": 3.131067961165048e-05, | |
| "loss": 0.593, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 2.7238928376161837, | |
| "grad_norm": 0.06915730983018875, | |
| "learning_rate": 3.1067961165048545e-05, | |
| "loss": 0.8052, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 2.726079825041006, | |
| "grad_norm": 0.1225258857011795, | |
| "learning_rate": 3.08252427184466e-05, | |
| "loss": 0.7143, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 2.7282668124658285, | |
| "grad_norm": 0.08791057020425797, | |
| "learning_rate": 3.058252427184465e-05, | |
| "loss": 0.5169, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 2.7304537998906504, | |
| "grad_norm": 0.09579493850469589, | |
| "learning_rate": 3.0339805825242717e-05, | |
| "loss": 0.5776, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 2.7326407873154728, | |
| "grad_norm": 0.11249995231628418, | |
| "learning_rate": 3.0097087378640774e-05, | |
| "loss": 0.7811, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 2.734827774740295, | |
| "grad_norm": 0.09353052079677582, | |
| "learning_rate": 2.9854368932038832e-05, | |
| "loss": 0.6474, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 2.7370147621651175, | |
| "grad_norm": 0.0948183462023735, | |
| "learning_rate": 2.961165048543689e-05, | |
| "loss": 0.6142, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 2.73920174958994, | |
| "grad_norm": 0.03197981417179108, | |
| "learning_rate": 2.936893203883495e-05, | |
| "loss": 0.5058, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 2.7413887370147623, | |
| "grad_norm": 0.08017853647470474, | |
| "learning_rate": 2.9126213592233007e-05, | |
| "loss": 0.583, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 2.7435757244395846, | |
| "grad_norm": 0.04022474214434624, | |
| "learning_rate": 2.8883495145631064e-05, | |
| "loss": 0.8782, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 2.7457627118644066, | |
| "grad_norm": 0.061203911900520325, | |
| "learning_rate": 2.8640776699029125e-05, | |
| "loss": 0.7384, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 2.747949699289229, | |
| "grad_norm": 0.07927464693784714, | |
| "learning_rate": 2.8398058252427182e-05, | |
| "loss": 0.651, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 2.7501366867140513, | |
| "grad_norm": 0.041599441319704056, | |
| "learning_rate": 2.815533980582524e-05, | |
| "loss": 0.7055, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 2.7523236741388737, | |
| "grad_norm": 0.05352627485990524, | |
| "learning_rate": 2.79126213592233e-05, | |
| "loss": 0.5113, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 2.754510661563696, | |
| "grad_norm": 0.06345394998788834, | |
| "learning_rate": 2.7669902912621357e-05, | |
| "loss": 0.6726, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 2.7566976489885184, | |
| "grad_norm": 0.059153128415346146, | |
| "learning_rate": 2.7427184466019415e-05, | |
| "loss": 0.3546, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 2.758884636413341, | |
| "grad_norm": 0.08715004473924637, | |
| "learning_rate": 2.7184466019417475e-05, | |
| "loss": 0.6837, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 2.7610716238381627, | |
| "grad_norm": 0.05116257816553116, | |
| "learning_rate": 2.6941747572815533e-05, | |
| "loss": 0.7118, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 2.763258611262985, | |
| "grad_norm": 0.118152916431427, | |
| "learning_rate": 2.669902912621359e-05, | |
| "loss": 0.6275, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 2.7654455986878075, | |
| "grad_norm": 0.04079804942011833, | |
| "learning_rate": 2.6456310679611647e-05, | |
| "loss": 0.4964, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 2.76763258611263, | |
| "grad_norm": 0.1402898132801056, | |
| "learning_rate": 2.6213592233009708e-05, | |
| "loss": 0.6728, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 2.7698195735374522, | |
| "grad_norm": 0.10157422721385956, | |
| "learning_rate": 2.5970873786407765e-05, | |
| "loss": 0.6064, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 2.7720065609622746, | |
| "grad_norm": 0.06446008384227753, | |
| "learning_rate": 2.5728155339805823e-05, | |
| "loss": 0.7651, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 2.774193548387097, | |
| "grad_norm": 0.07324625551700592, | |
| "learning_rate": 2.5485436893203883e-05, | |
| "loss": 0.6383, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 2.776380535811919, | |
| "grad_norm": 0.07557366043329239, | |
| "learning_rate": 2.524271844660194e-05, | |
| "loss": 0.704, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 2.7785675232367413, | |
| "grad_norm": 0.08134450018405914, | |
| "learning_rate": 2.4999999999999998e-05, | |
| "loss": 0.6886, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 2.7807545106615636, | |
| "grad_norm": 0.04855785891413689, | |
| "learning_rate": 2.475728155339806e-05, | |
| "loss": 0.741, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 2.782941498086386, | |
| "grad_norm": 0.06953386217355728, | |
| "learning_rate": 2.4514563106796116e-05, | |
| "loss": 0.5516, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 2.7851284855112084, | |
| "grad_norm": 0.10012605786323547, | |
| "learning_rate": 2.4271844660194173e-05, | |
| "loss": 0.7545, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 2.7873154729360308, | |
| "grad_norm": 0.157820925116539, | |
| "learning_rate": 2.4029126213592234e-05, | |
| "loss": 0.7965, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 2.789502460360853, | |
| "grad_norm": 0.04620638117194176, | |
| "learning_rate": 2.378640776699029e-05, | |
| "loss": 0.7223, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 2.791689447785675, | |
| "grad_norm": 0.08428886532783508, | |
| "learning_rate": 2.354368932038835e-05, | |
| "loss": 0.6298, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 2.7938764352104974, | |
| "grad_norm": 0.0942365750670433, | |
| "learning_rate": 2.3300970873786402e-05, | |
| "loss": 0.7266, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 2.79606342263532, | |
| "grad_norm": 0.05135050788521767, | |
| "learning_rate": 2.3058252427184466e-05, | |
| "loss": 0.7369, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 2.798250410060142, | |
| "grad_norm": 0.07809579372406006, | |
| "learning_rate": 2.281553398058252e-05, | |
| "loss": 0.743, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 2.8004373974849646, | |
| "grad_norm": 0.10320547968149185, | |
| "learning_rate": 2.2572815533980577e-05, | |
| "loss": 0.722, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 2.8026243849097865, | |
| "grad_norm": 0.09415378421545029, | |
| "learning_rate": 2.233009708737864e-05, | |
| "loss": 0.7528, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 2.8048113723346093, | |
| "grad_norm": 0.13103462755680084, | |
| "learning_rate": 2.2087378640776695e-05, | |
| "loss": 0.6785, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 2.8069983597594312, | |
| "grad_norm": 0.09520339220762253, | |
| "learning_rate": 2.1844660194174753e-05, | |
| "loss": 0.7469, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 2.8091853471842536, | |
| "grad_norm": 0.06156136468052864, | |
| "learning_rate": 2.1601941747572817e-05, | |
| "loss": 0.7619, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 2.811372334609076, | |
| "grad_norm": 0.07038269191980362, | |
| "learning_rate": 2.135922330097087e-05, | |
| "loss": 0.452, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 2.8135593220338984, | |
| "grad_norm": 0.07831636816263199, | |
| "learning_rate": 2.1116504854368928e-05, | |
| "loss": 0.7101, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 2.8157463094587207, | |
| "grad_norm": 0.08735121041536331, | |
| "learning_rate": 2.087378640776699e-05, | |
| "loss": 0.8627, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 2.8179332968835427, | |
| "grad_norm": 0.05308119207620621, | |
| "learning_rate": 2.0631067961165046e-05, | |
| "loss": 0.5359, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 2.8201202843083655, | |
| "grad_norm": 0.053742680698633194, | |
| "learning_rate": 2.0388349514563103e-05, | |
| "loss": 0.4116, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 2.8223072717331874, | |
| "grad_norm": 0.1100885346531868, | |
| "learning_rate": 2.014563106796116e-05, | |
| "loss": 0.9184, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 2.82449425915801, | |
| "grad_norm": 0.04482419416308403, | |
| "learning_rate": 1.990291262135922e-05, | |
| "loss": 0.4212, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 2.826681246582832, | |
| "grad_norm": 0.07495293021202087, | |
| "learning_rate": 1.966019417475728e-05, | |
| "loss": 0.8411, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 2.8288682340076545, | |
| "grad_norm": 0.06448440998792648, | |
| "learning_rate": 1.9417475728155336e-05, | |
| "loss": 0.6008, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 2.831055221432477, | |
| "grad_norm": 0.0787663534283638, | |
| "learning_rate": 1.9174757281553396e-05, | |
| "loss": 0.6465, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 2.833242208857299, | |
| "grad_norm": 0.08537033200263977, | |
| "learning_rate": 1.8932038834951454e-05, | |
| "loss": 0.7017, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 2.8354291962821216, | |
| "grad_norm": 0.058448903262615204, | |
| "learning_rate": 1.8689320388349514e-05, | |
| "loss": 0.4895, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 2.8376161837069436, | |
| "grad_norm": 0.05903905630111694, | |
| "learning_rate": 1.844660194174757e-05, | |
| "loss": 0.5857, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 2.839803171131766, | |
| "grad_norm": 0.1151174008846283, | |
| "learning_rate": 1.820388349514563e-05, | |
| "loss": 0.7771, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 2.8419901585565883, | |
| "grad_norm": 0.07729051262140274, | |
| "learning_rate": 1.7961165048543686e-05, | |
| "loss": 0.7188, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 2.8441771459814107, | |
| "grad_norm": 0.05661698058247566, | |
| "learning_rate": 1.7718446601941747e-05, | |
| "loss": 0.679, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 2.846364133406233, | |
| "grad_norm": 0.07814532518386841, | |
| "learning_rate": 1.7475728155339804e-05, | |
| "loss": 0.7158, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 2.848551120831055, | |
| "grad_norm": 0.08739367127418518, | |
| "learning_rate": 1.723300970873786e-05, | |
| "loss": 0.7517, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 2.8507381082558774, | |
| "grad_norm": 0.11283941566944122, | |
| "learning_rate": 1.6990291262135922e-05, | |
| "loss": 0.5734, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 2.8529250956806997, | |
| "grad_norm": 0.0747821256518364, | |
| "learning_rate": 1.674757281553398e-05, | |
| "loss": 0.5776, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 2.855112083105522, | |
| "grad_norm": 0.13224893808364868, | |
| "learning_rate": 1.6504854368932037e-05, | |
| "loss": 0.9154, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 2.8572990705303445, | |
| "grad_norm": 0.0987900048494339, | |
| "learning_rate": 1.6262135922330097e-05, | |
| "loss": 0.7162, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 2.859486057955167, | |
| "grad_norm": 0.11385104060173035, | |
| "learning_rate": 1.6019417475728155e-05, | |
| "loss": 0.6492, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 2.8616730453799892, | |
| "grad_norm": 0.0830446407198906, | |
| "learning_rate": 1.5776699029126212e-05, | |
| "loss": 0.5508, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 2.863860032804811, | |
| "grad_norm": 0.0805460587143898, | |
| "learning_rate": 1.5533980582524273e-05, | |
| "loss": 0.729, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 2.8660470202296335, | |
| "grad_norm": 0.04700075089931488, | |
| "learning_rate": 1.5291262135922327e-05, | |
| "loss": 0.4615, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 2.868234007654456, | |
| "grad_norm": 0.061646781861782074, | |
| "learning_rate": 1.5048543689320387e-05, | |
| "loss": 0.5481, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 2.8704209950792783, | |
| "grad_norm": 0.08027023822069168, | |
| "learning_rate": 1.4805825242718445e-05, | |
| "loss": 0.5735, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 2.8726079825041007, | |
| "grad_norm": 0.03735914081335068, | |
| "learning_rate": 1.4563106796116503e-05, | |
| "loss": 0.7346, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 2.874794969928923, | |
| "grad_norm": 0.12285737693309784, | |
| "learning_rate": 1.4320388349514562e-05, | |
| "loss": 0.4963, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 2.8769819573537454, | |
| "grad_norm": 0.07507744431495667, | |
| "learning_rate": 1.407766990291262e-05, | |
| "loss": 0.6827, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 2.8791689447785673, | |
| "grad_norm": 0.05776430293917656, | |
| "learning_rate": 1.3834951456310679e-05, | |
| "loss": 0.5822, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 2.8813559322033897, | |
| "grad_norm": 0.12840424478054047, | |
| "learning_rate": 1.3592233009708738e-05, | |
| "loss": 0.8266, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 2.883542919628212, | |
| "grad_norm": 0.06003335863351822, | |
| "learning_rate": 1.3349514563106795e-05, | |
| "loss": 0.9205, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 2.8857299070530344, | |
| "grad_norm": 0.12210050225257874, | |
| "learning_rate": 1.3106796116504854e-05, | |
| "loss": 0.6197, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 2.887916894477857, | |
| "grad_norm": 0.12230640649795532, | |
| "learning_rate": 1.2864077669902911e-05, | |
| "loss": 0.6101, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 2.890103881902679, | |
| "grad_norm": 0.11287673562765121, | |
| "learning_rate": 1.262135922330097e-05, | |
| "loss": 0.6778, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 2.8922908693275016, | |
| "grad_norm": 0.051977936178445816, | |
| "learning_rate": 1.237864077669903e-05, | |
| "loss": 0.5601, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 2.8944778567523235, | |
| "grad_norm": 0.07573337107896805, | |
| "learning_rate": 1.2135922330097087e-05, | |
| "loss": 0.6853, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 2.896664844177146, | |
| "grad_norm": 0.04770318791270256, | |
| "learning_rate": 1.1893203883495146e-05, | |
| "loss": 0.5123, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 2.8988518316019682, | |
| "grad_norm": 0.052877768874168396, | |
| "learning_rate": 1.1650485436893201e-05, | |
| "loss": 0.6725, | |
| "step": 1327 | |
| }, | |
| { | |
| "epoch": 2.9010388190267906, | |
| "grad_norm": 0.13692569732666016, | |
| "learning_rate": 1.140776699029126e-05, | |
| "loss": 0.6215, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 2.903225806451613, | |
| "grad_norm": 0.12803183495998383, | |
| "learning_rate": 1.116504854368932e-05, | |
| "loss": 0.7568, | |
| "step": 1329 | |
| }, | |
| { | |
| "epoch": 2.9054127938764354, | |
| "grad_norm": 0.11710110306739807, | |
| "learning_rate": 1.0922330097087376e-05, | |
| "loss": 0.5461, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 2.9075997813012577, | |
| "grad_norm": 0.05682337284088135, | |
| "learning_rate": 1.0679611650485435e-05, | |
| "loss": 0.4522, | |
| "step": 1331 | |
| }, | |
| { | |
| "epoch": 2.9097867687260797, | |
| "grad_norm": 0.07984013855457306, | |
| "learning_rate": 1.0436893203883494e-05, | |
| "loss": 0.7247, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 2.911973756150902, | |
| "grad_norm": 0.07054444402456284, | |
| "learning_rate": 1.0194174757281552e-05, | |
| "loss": 0.6919, | |
| "step": 1333 | |
| }, | |
| { | |
| "epoch": 2.9141607435757244, | |
| "grad_norm": 0.13668102025985718, | |
| "learning_rate": 9.95145631067961e-06, | |
| "loss": 0.6537, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 2.916347731000547, | |
| "grad_norm": 0.07120262086391449, | |
| "learning_rate": 9.708737864077668e-06, | |
| "loss": 0.6164, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 2.918534718425369, | |
| "grad_norm": 0.07631273567676544, | |
| "learning_rate": 9.466019417475727e-06, | |
| "loss": 0.5199, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 2.920721705850191, | |
| "grad_norm": 0.09133388847112656, | |
| "learning_rate": 9.223300970873786e-06, | |
| "loss": 0.6461, | |
| "step": 1337 | |
| }, | |
| { | |
| "epoch": 2.922908693275014, | |
| "grad_norm": 0.08395536988973618, | |
| "learning_rate": 8.980582524271843e-06, | |
| "loss": 0.8084, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 2.925095680699836, | |
| "grad_norm": 0.08669275045394897, | |
| "learning_rate": 8.737864077669902e-06, | |
| "loss": 0.6684, | |
| "step": 1339 | |
| }, | |
| { | |
| "epoch": 2.927282668124658, | |
| "grad_norm": 0.10852108895778656, | |
| "learning_rate": 8.495145631067961e-06, | |
| "loss": 0.9161, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 2.9294696555494806, | |
| "grad_norm": 0.05524509400129318, | |
| "learning_rate": 8.252427184466018e-06, | |
| "loss": 0.5381, | |
| "step": 1341 | |
| }, | |
| { | |
| "epoch": 2.931656642974303, | |
| "grad_norm": 0.08389575779438019, | |
| "learning_rate": 8.009708737864077e-06, | |
| "loss": 0.5115, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 2.9338436303991253, | |
| "grad_norm": 0.0699021965265274, | |
| "learning_rate": 7.766990291262136e-06, | |
| "loss": 0.791, | |
| "step": 1343 | |
| }, | |
| { | |
| "epoch": 2.9360306178239473, | |
| "grad_norm": 0.13843934237957, | |
| "learning_rate": 7.524271844660194e-06, | |
| "loss": 0.7158, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 2.93821760524877, | |
| "grad_norm": 0.07031416893005371, | |
| "learning_rate": 7.281553398058252e-06, | |
| "loss": 0.9197, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 2.940404592673592, | |
| "grad_norm": 0.07277525216341019, | |
| "learning_rate": 7.03883495145631e-06, | |
| "loss": 0.7927, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 2.9425915800984144, | |
| "grad_norm": 0.06796955317258835, | |
| "learning_rate": 6.796116504854369e-06, | |
| "loss": 0.5561, | |
| "step": 1347 | |
| }, | |
| { | |
| "epoch": 2.9447785675232367, | |
| "grad_norm": 0.09654916077852249, | |
| "learning_rate": 6.553398058252427e-06, | |
| "loss": 0.6991, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 2.946965554948059, | |
| "grad_norm": 0.05071938782930374, | |
| "learning_rate": 6.310679611650485e-06, | |
| "loss": 0.4628, | |
| "step": 1349 | |
| }, | |
| { | |
| "epoch": 2.9491525423728815, | |
| "grad_norm": 0.0948738306760788, | |
| "learning_rate": 6.067961165048543e-06, | |
| "loss": 0.5872, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 2.9513395297977034, | |
| "grad_norm": 0.09830582141876221, | |
| "learning_rate": 5.8252427184466006e-06, | |
| "loss": 0.6827, | |
| "step": 1351 | |
| }, | |
| { | |
| "epoch": 2.9535265172225262, | |
| "grad_norm": 0.06940705329179764, | |
| "learning_rate": 5.58252427184466e-06, | |
| "loss": 0.6454, | |
| "step": 1352 | |
| }, | |
| { | |
| "epoch": 2.955713504647348, | |
| "grad_norm": 0.10547400265932083, | |
| "learning_rate": 5.339805825242718e-06, | |
| "loss": 0.6769, | |
| "step": 1353 | |
| }, | |
| { | |
| "epoch": 2.9579004920721705, | |
| "grad_norm": 0.07688979804515839, | |
| "learning_rate": 5.097087378640776e-06, | |
| "loss": 0.6048, | |
| "step": 1354 | |
| }, | |
| { | |
| "epoch": 2.960087479496993, | |
| "grad_norm": 0.2706167995929718, | |
| "learning_rate": 4.854368932038834e-06, | |
| "loss": 0.9213, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 2.9622744669218153, | |
| "grad_norm": 0.07999067008495331, | |
| "learning_rate": 4.611650485436893e-06, | |
| "loss": 0.7272, | |
| "step": 1356 | |
| }, | |
| { | |
| "epoch": 2.9644614543466377, | |
| "grad_norm": 0.10268191993236542, | |
| "learning_rate": 4.368932038834951e-06, | |
| "loss": 0.7823, | |
| "step": 1357 | |
| }, | |
| { | |
| "epoch": 2.9666484417714596, | |
| "grad_norm": 0.2441248893737793, | |
| "learning_rate": 4.126213592233009e-06, | |
| "loss": 0.8201, | |
| "step": 1358 | |
| }, | |
| { | |
| "epoch": 2.968835429196282, | |
| "grad_norm": 0.07264109700918198, | |
| "learning_rate": 3.883495145631068e-06, | |
| "loss": 0.6676, | |
| "step": 1359 | |
| }, | |
| { | |
| "epoch": 2.9710224166211043, | |
| "grad_norm": 0.04458310455083847, | |
| "learning_rate": 3.640776699029126e-06, | |
| "loss": 0.6127, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 2.9732094040459267, | |
| "grad_norm": 0.08155495673418045, | |
| "learning_rate": 3.3980582524271844e-06, | |
| "loss": 0.5568, | |
| "step": 1361 | |
| }, | |
| { | |
| "epoch": 2.975396391470749, | |
| "grad_norm": 0.08346610516309738, | |
| "learning_rate": 3.1553398058252426e-06, | |
| "loss": 0.745, | |
| "step": 1362 | |
| }, | |
| { | |
| "epoch": 2.9775833788955715, | |
| "grad_norm": 0.0532771572470665, | |
| "learning_rate": 2.9126213592233003e-06, | |
| "loss": 0.5768, | |
| "step": 1363 | |
| }, | |
| { | |
| "epoch": 2.979770366320394, | |
| "grad_norm": 0.07556641101837158, | |
| "learning_rate": 2.669902912621359e-06, | |
| "loss": 0.5983, | |
| "step": 1364 | |
| }, | |
| { | |
| "epoch": 2.9819573537452158, | |
| "grad_norm": 0.09355679899454117, | |
| "learning_rate": 2.427184466019417e-06, | |
| "loss": 0.7703, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 2.984144341170038, | |
| "grad_norm": 0.11744330078363419, | |
| "learning_rate": 2.1844660194174755e-06, | |
| "loss": 0.8041, | |
| "step": 1366 | |
| }, | |
| { | |
| "epoch": 2.9863313285948605, | |
| "grad_norm": 0.08273312449455261, | |
| "learning_rate": 1.941747572815534e-06, | |
| "loss": 0.6304, | |
| "step": 1367 | |
| }, | |
| { | |
| "epoch": 2.988518316019683, | |
| "grad_norm": 0.05637767165899277, | |
| "learning_rate": 1.6990291262135922e-06, | |
| "loss": 0.5463, | |
| "step": 1368 | |
| }, | |
| { | |
| "epoch": 2.9907053034445052, | |
| "grad_norm": 0.05977838486433029, | |
| "learning_rate": 1.4563106796116501e-06, | |
| "loss": 0.713, | |
| "step": 1369 | |
| }, | |
| { | |
| "epoch": 2.9928922908693276, | |
| "grad_norm": 0.12927646934986115, | |
| "learning_rate": 1.2135922330097085e-06, | |
| "loss": 0.7835, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 2.99507927829415, | |
| "grad_norm": 0.06799733638763428, | |
| "learning_rate": 9.70873786407767e-07, | |
| "loss": 0.5287, | |
| "step": 1371 | |
| }, | |
| { | |
| "epoch": 2.997266265718972, | |
| "grad_norm": 0.1508604884147644, | |
| "learning_rate": 7.281553398058251e-07, | |
| "loss": 0.736, | |
| "step": 1372 | |
| }, | |
| { | |
| "epoch": 2.9994532531437943, | |
| "grad_norm": 0.07225324213504791, | |
| "learning_rate": 4.854368932038835e-07, | |
| "loss": 0.6506, | |
| "step": 1373 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.6369936466217041, | |
| "learning_rate": 2.4271844660194176e-07, | |
| "loss": 0.5374, | |
| "step": 1374 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 1374, | |
| "total_flos": 8.559517791567217e+17, | |
| "train_loss": 0.6696628328667631, | |
| "train_runtime": 2857.8712, | |
| "train_samples_per_second": 7.68, | |
| "train_steps_per_second": 0.481 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 1374, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.559517791567217e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
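
The document above appears to be the `trainer_state.json` written by the Hugging Face `Trainer` at the end of a 3-epoch, 1374-step run. For working with it programmatically, here is a minimal sketch, assuming the JSON is saved as `trainer_state.json` (the conventional output name; the path is an assumption, not something stored in the file). It cross-checks the summary block against the per-step `log_history`: the reported `train_loss` against the mean of the logged losses, the throughput figures against `max_steps` and `train_runtime`, and the constant per-step learning-rate decrement visible over the final steps.

```python
import json
from statistics import mean

# Assumed path: the Hugging Face Trainer conventionally writes this file as
# trainer_state.json inside the output (or checkpoint) directory.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final log_history entry is the
# run summary (train_loss, train_runtime, ...) and has no "loss" key.
records = [r for r in state["log_history"] if "loss" in r]
summary = state["log_history"][-1]

# With logging_steps = 1, the reported train_loss (0.6697) should match the
# mean of the per-step losses.
print(f"mean step loss: {mean(r['loss'] for r in records):.4f} "
      f"(reported train_loss: {summary['train_loss']:.4f})")

# Throughput cross-check: 1374 steps / 2857.87 s ~ 0.481 steps/s, matching
# the reported train_steps_per_second.
print(f"steps/sec: {state['max_steps'] / summary['train_runtime']:.3f} "
      f"(reported: {summary['train_steps_per_second']})")

# samples/sec divided by steps/sec gives samples consumed per optimizer
# step; with train_batch_size = 1 this ratio (~16) hints at gradient
# accumulation and/or several processes. That is an inference from the
# ratios, not a value stored in this file.
ratio = summary["train_samples_per_second"] / summary["train_steps_per_second"]
print(f"implied samples per optimizer step: {ratio:.1f}")

# The tail of the schedule is a linear decay: a constant drop of
# ~2.4272e-07 per step, reaching zero one step past max_steps.
deltas = [earlier["learning_rate"] - later["learning_rate"]
          for earlier, later in zip(records[-100:], records[-99:])]
print(f"mean per-step LR decrement over last 100 steps: {mean(deltas):.4e}")
```

The sketch uses only the standard library, so it runs anywhere the JSON is available. Note that the samples-per-step figure is derived from the reported ratios rather than read from a field, so treat it as a consistency hint about the training setup, not ground truth.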