{
  "best_metric": 56.720179204778795,
  "best_model_checkpoint": "CharisMsensis/whisper-small-el/checkpoint-13000",
  "epoch": 14.412416851441241,
  "global_step": 13000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 4.2000000000000006e-07,
      "loss": 3.1562,
      "step": 25
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.200000000000001e-07,
      "loss": 2.4611,
      "step": 50
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.42e-06,
      "loss": 1.9694,
      "step": 75
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 1.4359,
      "step": 100
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.42e-06,
      "loss": 1.0483,
      "step": 125
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.92e-06,
      "loss": 1.0062,
      "step": 150
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 0.9075,
      "step": 175
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.920000000000001e-06,
      "loss": 0.844,
      "step": 200
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.42e-06,
      "loss": 0.7819,
      "step": 225
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.92e-06,
      "loss": 0.7264,
      "step": 250
    },
    {
      "epoch": 0.3,
      "learning_rate": 5.420000000000001e-06,
      "loss": 0.6921,
      "step": 275
    },
    {
      "epoch": 0.33,
      "learning_rate": 5.92e-06,
      "loss": 0.6049,
      "step": 300
    },
    {
      "epoch": 0.36,
      "learning_rate": 6.42e-06,
      "loss": 0.4845,
      "step": 325
    },
    {
      "epoch": 0.39,
      "learning_rate": 6.92e-06,
      "loss": 0.2998,
      "step": 350
    },
    {
      "epoch": 0.42,
      "learning_rate": 7.420000000000001e-06,
      "loss": 0.3535,
      "step": 375
    },
    {
      "epoch": 0.44,
      "learning_rate": 7.92e-06,
      "loss": 0.3455,
      "step": 400
    },
    {
      "epoch": 0.47,
      "learning_rate": 8.42e-06,
      "loss": 0.3208,
      "step": 425
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.920000000000001e-06,
      "loss": 0.2778,
      "step": 450
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.42e-06,
      "loss": 0.2778,
      "step": 475
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.920000000000002e-06,
      "loss": 0.3278,
      "step": 500
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.98551724137931e-06,
      "loss": 0.274,
      "step": 525
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.968275862068967e-06,
      "loss": 0.2801,
      "step": 550
    },
    {
      "epoch": 0.64,
      "learning_rate": 9.951034482758622e-06,
      "loss": 0.3386,
      "step": 575
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.933793103448277e-06,
      "loss": 0.3055,
      "step": 600
    },
    {
      "epoch": 0.69,
      "learning_rate": 9.916551724137932e-06,
      "loss": 0.3042,
      "step": 625
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.899310344827588e-06,
      "loss": 0.2838,
      "step": 650
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.882068965517241e-06,
      "loss": 0.3243,
      "step": 675
    },
    {
      "epoch": 0.78,
      "learning_rate": 9.864827586206898e-06,
      "loss": 0.2682,
      "step": 700
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.847586206896553e-06,
      "loss": 0.3168,
      "step": 725
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.830344827586208e-06,
      "loss": 0.3085,
      "step": 750
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.813103448275862e-06,
      "loss": 0.2691,
      "step": 775
    },
    {
      "epoch": 0.89,
      "learning_rate": 9.795862068965517e-06,
      "loss": 0.2721,
      "step": 800
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.778620689655172e-06,
      "loss": 0.2691,
      "step": 825
    },
    {
      "epoch": 0.94,
      "learning_rate": 9.761379310344829e-06,
      "loss": 0.2797,
      "step": 850
    },
    {
      "epoch": 0.97,
      "learning_rate": 9.744137931034484e-06,
      "loss": 0.257,
      "step": 875
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.726896551724139e-06,
      "loss": 0.2606,
      "step": 900
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.709655172413795e-06,
      "loss": 0.1628,
      "step": 925
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.692413793103448e-06,
      "loss": 0.1529,
      "step": 950
    },
    {
      "epoch": 1.08,
      "learning_rate": 9.675172413793105e-06,
      "loss": 0.162,
      "step": 975
    },
    {
      "epoch": 1.11,
      "learning_rate": 9.65793103448276e-06,
      "loss": 0.1399,
      "step": 1000
    },
    {
      "epoch": 1.11,
      "eval_loss": 0.29191553592681885,
      "eval_runtime": 3943.1183,
      "eval_samples_per_second": 0.432,
      "eval_steps_per_second": 0.054,
      "eval_wer": 243.45715885756954,
      "step": 1000
    },
    {
      "epoch": 1.14,
      "learning_rate": 9.640689655172415e-06,
      "loss": 0.1552,
      "step": 1025
    },
    {
      "epoch": 1.16,
      "learning_rate": 9.62344827586207e-06,
      "loss": 0.147,
      "step": 1050
    },
    {
      "epoch": 1.19,
      "learning_rate": 9.606206896551726e-06,
      "loss": 0.1647,
      "step": 1075
    },
    {
      "epoch": 1.22,
      "learning_rate": 9.58896551724138e-06,
      "loss": 0.165,
      "step": 1100
    },
    {
      "epoch": 1.25,
      "learning_rate": 9.571724137931036e-06,
      "loss": 0.1502,
      "step": 1125
    },
    {
      "epoch": 1.27,
      "learning_rate": 9.55448275862069e-06,
      "loss": 0.1293,
      "step": 1150
    },
    {
      "epoch": 1.3,
      "learning_rate": 9.537241379310345e-06,
      "loss": 0.1541,
      "step": 1175
    },
    {
      "epoch": 1.33,
      "learning_rate": 9.52e-06,
      "loss": 0.1492,
      "step": 1200
    },
    {
      "epoch": 1.36,
      "learning_rate": 9.502758620689655e-06,
      "loss": 0.1632,
      "step": 1225
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.48551724137931e-06,
      "loss": 0.1591,
      "step": 1250
    },
    {
      "epoch": 1.41,
      "learning_rate": 9.468275862068967e-06,
      "loss": 0.1542,
      "step": 1275
    },
    {
      "epoch": 1.44,
      "learning_rate": 9.451034482758622e-06,
      "loss": 0.1511,
      "step": 1300
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.433793103448276e-06,
      "loss": 0.15,
      "step": 1325
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.416551724137933e-06,
      "loss": 0.155,
      "step": 1350
    },
    {
      "epoch": 1.52,
      "learning_rate": 9.399310344827586e-06,
      "loss": 0.1381,
      "step": 1375
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.382068965517243e-06,
      "loss": 0.1636,
      "step": 1400
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.364827586206898e-06,
      "loss": 0.1611,
      "step": 1425
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.347586206896552e-06,
      "loss": 0.1522,
      "step": 1450
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.330344827586207e-06,
      "loss": 0.1359,
      "step": 1475
    },
    {
      "epoch": 1.66,
      "learning_rate": 9.313103448275864e-06,
      "loss": 0.1388,
      "step": 1500
    },
    {
      "epoch": 1.69,
      "learning_rate": 9.295862068965517e-06,
      "loss": 0.1512,
      "step": 1525
    },
    {
      "epoch": 1.72,
      "learning_rate": 9.278620689655174e-06,
      "loss": 0.1284,
      "step": 1550
    },
    {
      "epoch": 1.75,
      "learning_rate": 9.261379310344828e-06,
      "loss": 0.1484,
      "step": 1575
    },
    {
      "epoch": 1.77,
      "learning_rate": 9.244137931034483e-06,
      "loss": 0.1303,
      "step": 1600
    },
    {
      "epoch": 1.8,
      "learning_rate": 9.226896551724138e-06,
      "loss": 0.1743,
      "step": 1625
    },
    {
      "epoch": 1.83,
      "learning_rate": 9.209655172413793e-06,
      "loss": 0.1655,
      "step": 1650
    },
    {
      "epoch": 1.86,
      "learning_rate": 9.192413793103448e-06,
      "loss": 0.1327,
      "step": 1675
    },
    {
      "epoch": 1.88,
      "learning_rate": 9.175172413793105e-06,
      "loss": 0.1412,
      "step": 1700
    },
    {
      "epoch": 1.91,
      "learning_rate": 9.15793103448276e-06,
      "loss": 0.1346,
      "step": 1725
    },
    {
      "epoch": 1.94,
      "learning_rate": 9.140689655172414e-06,
      "loss": 0.1258,
      "step": 1750
    },
    {
      "epoch": 1.97,
      "learning_rate": 9.12344827586207e-06,
      "loss": 0.1702,
      "step": 1775
    },
    {
      "epoch": 2.0,
      "learning_rate": 9.106206896551724e-06,
      "loss": 0.1444,
      "step": 1800
    },
    {
      "epoch": 2.02,
      "learning_rate": 9.08896551724138e-06,
      "loss": 0.0881,
      "step": 1825
    },
    {
      "epoch": 2.05,
      "learning_rate": 9.071724137931035e-06,
      "loss": 0.0722,
      "step": 1850
    },
    {
      "epoch": 2.08,
      "learning_rate": 9.05448275862069e-06,
      "loss": 0.0726,
      "step": 1875
    },
    {
      "epoch": 2.11,
      "learning_rate": 9.037241379310345e-06,
      "loss": 0.0635,
      "step": 1900
    },
    {
      "epoch": 2.13,
      "learning_rate": 9.020000000000002e-06,
      "loss": 0.0673,
      "step": 1925
    },
    {
      "epoch": 2.16,
      "learning_rate": 9.002758620689655e-06,
      "loss": 0.0658,
      "step": 1950
    },
    {
      "epoch": 2.19,
      "learning_rate": 8.985517241379311e-06,
      "loss": 0.0817,
      "step": 1975
    },
    {
      "epoch": 2.22,
      "learning_rate": 8.968275862068966e-06,
      "loss": 0.0726,
      "step": 2000
    },
    {
      "epoch": 2.22,
      "eval_loss": 0.2807031571865082,
      "eval_runtime": 3432.3667,
      "eval_samples_per_second": 0.496,
      "eval_steps_per_second": 0.062,
      "eval_wer": 110.51894717192458,
      "step": 2000
    },
    {
      "epoch": 2.25,
      "learning_rate": 8.951034482758621e-06,
      "loss": 0.0802,
      "step": 2025
    },
    {
      "epoch": 2.27,
      "learning_rate": 8.933793103448276e-06,
      "loss": 0.0702,
      "step": 2050
    },
    {
      "epoch": 2.3,
      "learning_rate": 8.916551724137931e-06,
      "loss": 0.0821,
      "step": 2075
    },
    {
      "epoch": 2.33,
      "learning_rate": 8.899310344827588e-06,
      "loss": 0.0825,
      "step": 2100
    },
    {
      "epoch": 2.36,
      "learning_rate": 8.882068965517242e-06,
      "loss": 0.0718,
      "step": 2125
    },
    {
      "epoch": 2.38,
      "learning_rate": 8.864827586206897e-06,
      "loss": 0.075,
      "step": 2150
    },
    {
      "epoch": 2.41,
      "learning_rate": 8.847586206896552e-06,
      "loss": 0.0818,
      "step": 2175
    },
    {
      "epoch": 2.44,
      "learning_rate": 8.830344827586209e-06,
      "loss": 0.0735,
      "step": 2200
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.813103448275862e-06,
      "loss": 0.0636,
      "step": 2225
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.795862068965518e-06,
      "loss": 0.0683,
      "step": 2250
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.778620689655173e-06,
      "loss": 0.0775,
      "step": 2275
    },
    {
      "epoch": 2.55,
      "learning_rate": 8.761379310344828e-06,
      "loss": 0.072,
      "step": 2300
    },
    {
      "epoch": 2.58,
      "learning_rate": 8.744137931034483e-06,
      "loss": 0.0724,
      "step": 2325
    },
    {
      "epoch": 2.61,
      "learning_rate": 8.72689655172414e-06,
      "loss": 0.0658,
      "step": 2350
    },
    {
      "epoch": 2.63,
      "learning_rate": 8.709655172413793e-06,
      "loss": 0.0762,
      "step": 2375
    },
    {
      "epoch": 2.66,
      "learning_rate": 8.69241379310345e-06,
      "loss": 0.0886,
      "step": 2400
    },
    {
      "epoch": 2.69,
      "learning_rate": 8.675172413793104e-06,
      "loss": 0.0792,
      "step": 2425
    },
    {
      "epoch": 2.72,
      "learning_rate": 8.657931034482759e-06,
      "loss": 0.0667,
      "step": 2450
    },
    {
      "epoch": 2.74,
      "learning_rate": 8.640689655172414e-06,
      "loss": 0.0753,
      "step": 2475
    },
    {
      "epoch": 2.77,
      "learning_rate": 8.623448275862069e-06,
      "loss": 0.0765,
      "step": 2500
    },
    {
      "epoch": 2.8,
      "learning_rate": 8.606206896551725e-06,
      "loss": 0.0729,
      "step": 2525
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.58896551724138e-06,
      "loss": 0.0687,
      "step": 2550
    },
    {
      "epoch": 2.85,
      "learning_rate": 8.571724137931035e-06,
      "loss": 0.0715,
      "step": 2575
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.55448275862069e-06,
      "loss": 0.0777,
      "step": 2600
    },
    {
      "epoch": 2.91,
      "learning_rate": 8.537241379310347e-06,
      "loss": 0.0787,
      "step": 2625
    },
    {
      "epoch": 2.94,
      "learning_rate": 8.52e-06,
      "loss": 0.0659,
      "step": 2650
    },
    {
      "epoch": 2.97,
      "learning_rate": 8.502758620689656e-06,
      "loss": 0.0759,
      "step": 2675
    },
    {
      "epoch": 2.99,
      "learning_rate": 8.485517241379311e-06,
      "loss": 0.0714,
      "step": 2700
    },
    {
      "epoch": 3.02,
      "learning_rate": 8.468275862068966e-06,
      "loss": 0.053,
      "step": 2725
    },
    {
      "epoch": 3.05,
      "learning_rate": 8.451034482758621e-06,
      "loss": 0.0387,
      "step": 2750
    },
    {
      "epoch": 3.08,
      "learning_rate": 8.433793103448277e-06,
      "loss": 0.0339,
      "step": 2775
    },
    {
      "epoch": 3.1,
      "learning_rate": 8.41655172413793e-06,
      "loss": 0.0331,
      "step": 2800
    },
    {
      "epoch": 3.13,
      "learning_rate": 8.399310344827587e-06,
      "loss": 0.0363,
      "step": 2825
    },
    {
      "epoch": 3.16,
      "learning_rate": 8.382068965517242e-06,
      "loss": 0.0434,
      "step": 2850
    },
    {
      "epoch": 3.19,
      "learning_rate": 8.364827586206897e-06,
      "loss": 0.0376,
      "step": 2875
    },
    {
      "epoch": 3.22,
      "learning_rate": 8.347586206896552e-06,
      "loss": 0.0423,
      "step": 2900
    },
    {
      "epoch": 3.24,
      "learning_rate": 8.330344827586208e-06,
      "loss": 0.0407,
      "step": 2925
    },
    {
      "epoch": 3.27,
      "learning_rate": 8.313103448275863e-06,
      "loss": 0.0415,
      "step": 2950
    },
    {
      "epoch": 3.3,
      "learning_rate": 8.295862068965518e-06,
      "loss": 0.0379,
      "step": 2975
    },
    {
      "epoch": 3.33,
      "learning_rate": 8.278620689655173e-06,
      "loss": 0.0404,
      "step": 3000
    },
    {
      "epoch": 3.33,
      "eval_loss": 0.3070586323738098,
      "eval_runtime": 3252.4732,
      "eval_samples_per_second": 0.524,
      "eval_steps_per_second": 0.065,
      "eval_wer": 135.31827515400408,
      "step": 3000
    },
    {
      "epoch": 3.35,
      "learning_rate": 8.261379310344828e-06,
      "loss": 0.0406,
      "step": 3025
    },
    {
      "epoch": 3.38,
      "learning_rate": 8.244137931034484e-06,
      "loss": 0.042,
      "step": 3050
    },
    {
      "epoch": 3.41,
      "learning_rate": 8.226896551724138e-06,
      "loss": 0.0429,
      "step": 3075
    },
    {
      "epoch": 3.44,
      "learning_rate": 8.209655172413794e-06,
      "loss": 0.0436,
      "step": 3100
    },
    {
      "epoch": 3.46,
      "learning_rate": 8.192413793103449e-06,
      "loss": 0.0427,
      "step": 3125
    },
    {
      "epoch": 3.49,
      "learning_rate": 8.175172413793104e-06,
      "loss": 0.0462,
      "step": 3150
    },
    {
      "epoch": 3.52,
      "learning_rate": 8.157931034482759e-06,
      "loss": 0.0376,
      "step": 3175
    },
    {
      "epoch": 3.55,
      "learning_rate": 8.140689655172415e-06,
      "loss": 0.0503,
      "step": 3200
    },
    {
      "epoch": 3.58,
      "learning_rate": 8.123448275862069e-06,
      "loss": 0.0403,
      "step": 3225
    },
    {
      "epoch": 3.6,
      "learning_rate": 8.106206896551725e-06,
      "loss": 0.0386,
      "step": 3250
    },
    {
      "epoch": 3.63,
      "learning_rate": 8.08896551724138e-06,
      "loss": 0.0437,
      "step": 3275
    },
    {
      "epoch": 3.66,
      "learning_rate": 8.071724137931035e-06,
      "loss": 0.0383,
      "step": 3300
    },
    {
      "epoch": 3.69,
      "learning_rate": 8.054482758620691e-06,
      "loss": 0.034,
      "step": 3325
    },
    {
      "epoch": 3.71,
      "learning_rate": 8.037241379310346e-06,
      "loss": 0.0412,
      "step": 3350
    },
    {
      "epoch": 3.74,
      "learning_rate": 8.020000000000001e-06,
      "loss": 0.0399,
      "step": 3375
    },
    {
      "epoch": 3.77,
      "learning_rate": 8.002758620689656e-06,
      "loss": 0.0479,
      "step": 3400
    },
    {
      "epoch": 3.8,
      "learning_rate": 7.985517241379311e-06,
      "loss": 0.0539,
      "step": 3425
    },
    {
      "epoch": 3.82,
      "learning_rate": 7.968275862068966e-06,
      "loss": 0.0385,
      "step": 3450
    },
    {
      "epoch": 3.85,
      "learning_rate": 7.951034482758622e-06,
      "loss": 0.0463,
      "step": 3475
    },
    {
      "epoch": 3.88,
      "learning_rate": 7.933793103448275e-06,
      "loss": 0.0328,
      "step": 3500
    },
    {
      "epoch": 3.91,
      "learning_rate": 7.916551724137932e-06,
      "loss": 0.042,
      "step": 3525
    },
    {
      "epoch": 3.94,
      "learning_rate": 7.899310344827587e-06,
      "loss": 0.0413,
      "step": 3550
    },
    {
      "epoch": 3.96,
      "learning_rate": 7.882068965517242e-06,
      "loss": 0.0383,
      "step": 3575
    },
    {
      "epoch": 3.99,
      "learning_rate": 7.864827586206897e-06,
      "loss": 0.0487,
      "step": 3600
    },
    {
      "epoch": 4.02,
      "learning_rate": 7.847586206896553e-06,
      "loss": 0.0187,
      "step": 3625
    },
    {
      "epoch": 4.05,
      "learning_rate": 7.830344827586206e-06,
      "loss": 0.0174,
      "step": 3650
    },
    {
      "epoch": 4.07,
      "learning_rate": 7.813103448275863e-06,
      "loss": 0.0206,
      "step": 3675
    },
    {
      "epoch": 4.1,
      "learning_rate": 7.795862068965518e-06,
      "loss": 0.0293,
      "step": 3700
    },
    {
      "epoch": 4.13,
      "learning_rate": 7.778620689655173e-06,
      "loss": 0.02,
      "step": 3725
    },
    {
      "epoch": 4.16,
      "learning_rate": 7.76137931034483e-06,
      "loss": 0.0195,
      "step": 3750
    },
    {
      "epoch": 4.19,
      "learning_rate": 7.744137931034484e-06,
      "loss": 0.0205,
      "step": 3775
    },
    {
      "epoch": 4.21,
      "learning_rate": 7.726896551724139e-06,
      "loss": 0.0191,
      "step": 3800
    },
    {
      "epoch": 4.24,
      "learning_rate": 7.709655172413794e-06,
      "loss": 0.016,
      "step": 3825
    },
    {
      "epoch": 4.27,
      "learning_rate": 7.692413793103449e-06,
      "loss": 0.0232,
      "step": 3850
    },
    {
      "epoch": 4.3,
      "learning_rate": 7.675172413793104e-06,
      "loss": 0.0177,
      "step": 3875
    },
    {
      "epoch": 4.32,
      "learning_rate": 7.65793103448276e-06,
      "loss": 0.0233,
      "step": 3900
    },
    {
      "epoch": 4.35,
      "learning_rate": 7.640689655172413e-06,
      "loss": 0.0223,
      "step": 3925
    },
    {
      "epoch": 4.38,
      "learning_rate": 7.62344827586207e-06,
      "loss": 0.027,
      "step": 3950
    },
    {
      "epoch": 4.41,
      "learning_rate": 7.606206896551725e-06,
      "loss": 0.0246,
      "step": 3975
    },
    {
      "epoch": 4.43,
      "learning_rate": 7.58896551724138e-06,
      "loss": 0.0247,
      "step": 4000
    },
    {
      "epoch": 4.43,
      "eval_loss": 0.32613393664360046,
      "eval_runtime": 3013.158,
      "eval_samples_per_second": 0.566,
      "eval_steps_per_second": 0.071,
      "eval_wer": 69.33918237819675,
      "step": 4000
    },
    {
      "epoch": 4.46,
      "learning_rate": 7.5717241379310345e-06,
      "loss": 0.0206,
      "step": 4025
    },
    {
      "epoch": 4.49,
      "learning_rate": 7.55448275862069e-06,
      "loss": 0.0214,
      "step": 4050
    },
    {
      "epoch": 4.52,
      "learning_rate": 7.537241379310345e-06,
      "loss": 0.0289,
      "step": 4075
    },
    {
      "epoch": 4.55,
      "learning_rate": 7.520000000000001e-06,
      "loss": 0.0182,
      "step": 4100
    },
    {
      "epoch": 4.57,
      "learning_rate": 7.5027586206896566e-06,
      "loss": 0.0269,
      "step": 4125
    },
    {
      "epoch": 4.6,
      "learning_rate": 7.485517241379311e-06,
      "loss": 0.0273,
      "step": 4150
    },
    {
      "epoch": 4.63,
      "learning_rate": 7.468275862068966e-06,
      "loss": 0.0319,
      "step": 4175
    },
    {
      "epoch": 4.66,
      "learning_rate": 7.451724137931035e-06,
      "loss": 0.0293,
      "step": 4200
    },
    {
      "epoch": 4.68,
      "learning_rate": 7.4344827586206906e-06,
      "loss": 0.0226,
      "step": 4225
    },
    {
      "epoch": 4.71,
      "learning_rate": 7.417241379310346e-06,
      "loss": 0.0231,
      "step": 4250
    },
    {
      "epoch": 4.74,
      "learning_rate": 7.4e-06,
      "loss": 0.0199,
      "step": 4275
    },
    {
      "epoch": 4.77,
      "learning_rate": 7.382758620689656e-06,
      "loss": 0.0228,
      "step": 4300
    },
    {
      "epoch": 4.79,
      "learning_rate": 7.365517241379311e-06,
      "loss": 0.0303,
      "step": 4325
    },
    {
      "epoch": 4.82,
      "learning_rate": 7.348275862068967e-06,
      "loss": 0.023,
      "step": 4350
    },
    {
      "epoch": 4.85,
      "learning_rate": 7.3310344827586215e-06,
      "loss": 0.0297,
      "step": 4375
    },
    {
      "epoch": 4.88,
      "learning_rate": 7.313793103448276e-06,
      "loss": 0.0409,
      "step": 4400
    },
    {
      "epoch": 4.91,
      "learning_rate": 7.296551724137931e-06,
      "loss": 0.0179,
      "step": 4425
    },
    {
      "epoch": 4.93,
      "learning_rate": 7.279310344827587e-06,
      "loss": 0.0288,
      "step": 4450
    },
    {
      "epoch": 4.96,
      "learning_rate": 7.262068965517242e-06,
      "loss": 0.0339,
      "step": 4475
    },
    {
      "epoch": 4.99,
      "learning_rate": 7.2448275862068975e-06,
      "loss": 0.0278,
      "step": 4500
    },
    {
      "epoch": 5.02,
      "learning_rate": 7.2275862068965515e-06,
      "loss": 0.0154,
      "step": 4525
    },
    {
      "epoch": 5.04,
      "learning_rate": 7.210344827586207e-06,
      "loss": 0.0091,
      "step": 4550
    },
    {
      "epoch": 5.07,
      "learning_rate": 7.193103448275862e-06,
      "loss": 0.0105,
      "step": 4575
    },
    {
      "epoch": 5.1,
      "learning_rate": 7.175862068965518e-06,
      "loss": 0.0122,
      "step": 4600
    },
    {
      "epoch": 5.13,
      "learning_rate": 7.1586206896551736e-06,
      "loss": 0.0102,
      "step": 4625
    },
    {
      "epoch": 5.16,
      "learning_rate": 7.1413793103448284e-06,
      "loss": 0.0111,
      "step": 4650
    },
    {
      "epoch": 5.18,
      "learning_rate": 7.124137931034484e-06,
      "loss": 0.0102,
      "step": 4675
    },
    {
      "epoch": 5.21,
      "learning_rate": 7.106896551724138e-06,
      "loss": 0.0154,
      "step": 4700
    },
    {
      "epoch": 5.24,
      "learning_rate": 7.089655172413794e-06,
      "loss": 0.0145,
      "step": 4725
    },
    {
      "epoch": 5.27,
      "learning_rate": 7.072413793103449e-06,
      "loss": 0.0159,
      "step": 4750
    },
    {
      "epoch": 5.29,
      "learning_rate": 7.0551724137931045e-06,
      "loss": 0.0073,
      "step": 4775
    },
    {
      "epoch": 5.32,
      "learning_rate": 7.037931034482759e-06,
      "loss": 0.008,
      "step": 4800
    },
    {
      "epoch": 5.35,
      "learning_rate": 7.020689655172414e-06,
      "loss": 0.0117,
      "step": 4825
    },
    {
      "epoch": 5.38,
      "learning_rate": 7.003448275862069e-06,
      "loss": 0.0109,
      "step": 4850
    },
    {
      "epoch": 5.4,
      "learning_rate": 6.986206896551725e-06,
      "loss": 0.0192,
      "step": 4875
    },
    {
      "epoch": 5.43,
      "learning_rate": 6.96896551724138e-06,
      "loss": 0.0077,
      "step": 4900
    },
    {
      "epoch": 5.46,
      "learning_rate": 6.951724137931035e-06,
      "loss": 0.0114,
      "step": 4925
    },
    {
      "epoch": 5.49,
      "learning_rate": 6.934482758620689e-06,
      "loss": 0.0118,
      "step": 4950
    },
    {
      "epoch": 5.52,
      "learning_rate": 6.917241379310345e-06,
      "loss": 0.0133,
      "step": 4975
    },
    {
      "epoch": 5.54,
      "learning_rate": 6.9e-06,
      "loss": 0.0102,
      "step": 5000
    },
    {
      "epoch": 5.54,
      "eval_loss": 0.37662339210510254,
      "eval_runtime": 3307.2582,
      "eval_samples_per_second": 0.515,
      "eval_steps_per_second": 0.064,
      "eval_wer": 99.71065895090535,
      "step": 5000
    },
    {
      "epoch": 5.57,
      "learning_rate": 6.882758620689656e-06,
      "loss": 0.0075,
      "step": 5025
    },
    {
      "epoch": 5.6,
      "learning_rate": 6.8655172413793114e-06,
      "loss": 0.0238,
      "step": 5050
    },
    {
      "epoch": 5.63,
      "learning_rate": 6.848275862068966e-06,
      "loss": 0.0124,
      "step": 5075
    },
    {
      "epoch": 5.65,
      "learning_rate": 6.831034482758622e-06,
      "loss": 0.0132,
      "step": 5100
    },
    {
      "epoch": 5.68,
      "learning_rate": 6.813793103448276e-06,
      "loss": 0.0257,
      "step": 5125
    },
    {
      "epoch": 5.71,
      "learning_rate": 6.796551724137932e-06,
      "loss": 0.0153,
      "step": 5150
    },
    {
      "epoch": 5.74,
      "learning_rate": 6.779310344827587e-06,
      "loss": 0.0154,
      "step": 5175
    },
    {
      "epoch": 5.76,
      "learning_rate": 6.762068965517242e-06,
      "loss": 0.0098,
      "step": 5200
    },
    {
      "epoch": 5.79,
      "learning_rate": 6.744827586206897e-06,
      "loss": 0.0175,
      "step": 5225
    },
    {
      "epoch": 5.82,
      "learning_rate": 6.727586206896552e-06,
      "loss": 0.0142,
      "step": 5250
    },
    {
      "epoch": 5.85,
      "learning_rate": 6.710344827586207e-06,
      "loss": 0.0193,
      "step": 5275
    },
    {
      "epoch": 5.88,
      "learning_rate": 6.693103448275863e-06,
      "loss": 0.0133,
      "step": 5300
    },
    {
      "epoch": 5.9,
      "learning_rate": 6.6758620689655176e-06,
      "loss": 0.0061,
      "step": 5325
    },
    {
      "epoch": 5.93,
      "learning_rate": 6.658620689655173e-06,
      "loss": 0.0127,
      "step": 5350
    },
    {
      "epoch": 5.96,
      "learning_rate": 6.641379310344827e-06,
      "loss": 0.0109,
      "step": 5375
    },
    {
      "epoch": 5.99,
      "learning_rate": 6.624137931034483e-06,
      "loss": 0.0135,
      "step": 5400
    },
    {
      "epoch": 6.01,
      "learning_rate": 6.606896551724139e-06,
      "loss": 0.0113,
      "step": 5425
    },
    {
      "epoch": 6.04,
      "learning_rate": 6.589655172413794e-06,
      "loss": 0.006,
      "step": 5450
    },
    {
      "epoch": 6.07,
      "learning_rate": 6.572413793103449e-06,
      "loss": 0.0089,
      "step": 5475
    },
    {
      "epoch": 6.1,
      "learning_rate": 6.555172413793104e-06,
      "loss": 0.007,
      "step": 5500
    },
    {
      "epoch": 6.13,
      "learning_rate": 6.53793103448276e-06,
      "loss": 0.0072,
      "step": 5525
    },
    {
      "epoch": 6.15,
      "learning_rate": 6.520689655172414e-06,
      "loss": 0.0063,
      "step": 5550
    },
    {
      "epoch": 6.18,
      "learning_rate": 6.50344827586207e-06,
      "loss": 0.008,
      "step": 5575
    },
    {
      "epoch": 6.21,
      "learning_rate": 6.4862068965517245e-06,
      "loss": 0.0111,
      "step": 5600
    },
    {
      "epoch": 6.24,
      "learning_rate": 6.46896551724138e-06,
      "loss": 0.0042,
      "step": 5625
    },
    {
      "epoch": 6.26,
      "learning_rate": 6.451724137931035e-06,
      "loss": 0.005,
      "step": 5650
    },
    {
      "epoch": 6.29,
      "learning_rate": 6.43448275862069e-06,
      "loss": 0.003,
      "step": 5675
    },
    {
      "epoch": 6.32,
      "learning_rate": 6.417241379310345e-06,
      "loss": 0.0048,
      "step": 5700
    },
    {
      "epoch": 6.35,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.0034,
      "step": 5725
    },
    {
      "epoch": 6.37,
      "learning_rate": 6.3827586206896554e-06,
      "loss": 0.014,
      "step": 5750
    },
    {
      "epoch": 6.4,
      "learning_rate": 6.365517241379311e-06,
      "loss": 0.0035,
      "step": 5775
    },
    {
      "epoch": 6.43,
      "learning_rate": 6.348275862068965e-06,
      "loss": 0.005,
      "step": 5800
    },
    {
      "epoch": 6.46,
      "learning_rate": 6.331034482758621e-06,
      "loss": 0.0033,
      "step": 5825
    },
    {
      "epoch": 6.49,
      "learning_rate": 6.313793103448277e-06,
      "loss": 0.0057,
      "step": 5850
    },
    {
      "epoch": 6.51,
      "learning_rate": 6.2965517241379315e-06,
      "loss": 0.009,
      "step": 5875
    },
    {
      "epoch": 6.54,
      "learning_rate": 6.279310344827587e-06,
      "loss": 0.0075,
      "step": 5900
    },
    {
      "epoch": 6.57,
      "learning_rate": 6.262068965517242e-06,
      "loss": 0.0045,
      "step": 5925
    },
    {
      "epoch": 6.6,
      "learning_rate": 6.244827586206898e-06,
      "loss": 0.0021,
      "step": 5950
    },
    {
      "epoch": 6.62,
      "learning_rate": 6.227586206896552e-06,
      "loss": 0.0061,
      "step": 5975
    },
    {
      "epoch": 6.65,
      "learning_rate": 6.2103448275862075e-06,
      "loss": 0.0096,
      "step": 6000
    },
    {
      "epoch": 6.65,
      "eval_loss": 0.4006502330303192,
      "eval_runtime": 2903.7003,
      "eval_samples_per_second": 0.587,
      "eval_steps_per_second": 0.073,
      "eval_wer": 83.9835728952772,
      "step": 6000
    },
    {
      "epoch": 6.68,
      "learning_rate": 6.193103448275862e-06,
      "loss": 0.0041,
      "step": 6025
    },
    {
      "epoch": 6.71,
      "learning_rate": 6.175862068965518e-06,
      "loss": 0.0089,
      "step": 6050
    },
    {
      "epoch": 6.74,
      "learning_rate": 6.158620689655173e-06,
      "loss": 0.0055,
      "step": 6075
    },
    {
      "epoch": 6.76,
      "learning_rate": 6.141379310344828e-06,
      "loss": 0.0073,
      "step": 6100
    },
    {
      "epoch": 6.79,
      "learning_rate": 6.124137931034483e-06,
      "loss": 0.0069,
      "step": 6125
    },
    {
      "epoch": 6.82,
      "learning_rate": 6.1068965517241384e-06,
      "loss": 0.0059,
      "step": 6150
    },
    {
      "epoch": 6.85,
      "learning_rate": 6.089655172413793e-06,
      "loss": 0.0071,
      "step": 6175
    },
    {
      "epoch": 6.87,
      "learning_rate": 6.072413793103449e-06,
      "loss": 0.0053,
      "step": 6200
    },
    {
      "epoch": 6.9,
      "learning_rate": 6.055172413793105e-06,
      "loss": 0.0074,
      "step": 6225
    },
    {
      "epoch": 6.93,
      "learning_rate": 6.037931034482759e-06,
      "loss": 0.007,
      "step": 6250
    },
    {
      "epoch": 6.96,
      "learning_rate": 6.0206896551724145e-06,
      "loss": 0.0079,
      "step": 6275
    },
    {
      "epoch": 6.98,
      "learning_rate": 6.003448275862069e-06,
      "loss": 0.0111,
      "step": 6300
    },
    {
      "epoch": 7.01,
      "learning_rate": 5.986206896551725e-06,
      "loss": 0.0052,
      "step": 6325
    },
    {
      "epoch": 7.04,
      "learning_rate": 5.96896551724138e-06,
      "loss": 0.0055,
      "step": 6350
    },
    {
      "epoch": 7.07,
      "learning_rate": 5.951724137931036e-06,
      "loss": 0.0035,
      "step": 6375
    },
    {
      "epoch": 7.1,
      "learning_rate": 5.93448275862069e-06,
      "loss": 0.0031,
      "step": 6400
    },
    {
      "epoch": 7.12,
      "learning_rate": 5.917241379310345e-06,
      "loss": 0.0015,
      "step": 6425
    },
    {
      "epoch": 7.15,
      "learning_rate": 5.9e-06,
      "loss": 0.0055,
      "step": 6450
    },
    {
      "epoch": 7.18,
      "learning_rate": 5.882758620689656e-06,
      "loss": 0.0128,
      "step": 6475
    },
    {
      "epoch": 7.21,
      "learning_rate": 5.865517241379311e-06,
      "loss": 0.0028,
      "step": 6500
    },
    {
      "epoch": 7.23,
| "learning_rate": 5.848275862068966e-06, | |
| "loss": 0.0025, | |
| "step": 6525 | |
| }, | |
| { | |
| "epoch": 7.26, | |
| "learning_rate": 5.831034482758621e-06, | |
| "loss": 0.002, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 7.29, | |
| "learning_rate": 5.813793103448276e-06, | |
| "loss": 0.0061, | |
| "step": 6575 | |
| }, | |
| { | |
| "epoch": 7.32, | |
| "learning_rate": 5.796551724137931e-06, | |
| "loss": 0.001, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 7.34, | |
| "learning_rate": 5.779310344827587e-06, | |
| "loss": 0.0021, | |
| "step": 6625 | |
| }, | |
| { | |
| "epoch": 7.37, | |
| "learning_rate": 5.762068965517243e-06, | |
| "loss": 0.0008, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 7.4, | |
| "learning_rate": 5.744827586206897e-06, | |
| "loss": 0.0048, | |
| "step": 6675 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "learning_rate": 5.727586206896552e-06, | |
| "loss": 0.0017, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "learning_rate": 5.710344827586207e-06, | |
| "loss": 0.0032, | |
| "step": 6725 | |
| }, | |
| { | |
| "epoch": 7.48, | |
| "learning_rate": 5.693103448275863e-06, | |
| "loss": 0.0033, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 7.51, | |
| "learning_rate": 5.675862068965518e-06, | |
| "loss": 0.0093, | |
| "step": 6775 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "learning_rate": 5.6586206896551735e-06, | |
| "loss": 0.002, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 7.57, | |
| "learning_rate": 5.6413793103448275e-06, | |
| "loss": 0.0036, | |
| "step": 6825 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "learning_rate": 5.624137931034483e-06, | |
| "loss": 0.0066, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 7.62, | |
| "learning_rate": 5.606896551724138e-06, | |
| "loss": 0.0009, | |
| "step": 6875 | |
| }, | |
| { | |
| "epoch": 7.65, | |
| "learning_rate": 5.589655172413794e-06, | |
| "loss": 0.0007, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "learning_rate": 5.572413793103449e-06, | |
| "loss": 0.0035, | |
| "step": 6925 | |
| }, | |
| { | |
| "epoch": 7.71, | |
| "learning_rate": 5.555172413793104e-06, | |
| "loss": 0.0018, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 7.73, | |
| "learning_rate": 5.5379310344827585e-06, | |
| "loss": 0.0061, | |
| "step": 6975 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "learning_rate": 5.520689655172414e-06, | |
| "loss": 0.0098, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "eval_loss": 0.42961812019348145, | |
| "eval_runtime": 2962.2311, | |
| "eval_samples_per_second": 0.575, | |
| "eval_steps_per_second": 0.072, | |
| "eval_wer": 97.61993653164083, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "learning_rate": 5.503448275862069e-06, | |
| "loss": 0.0019, | |
| "step": 7025 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "learning_rate": 5.486206896551725e-06, | |
| "loss": 0.0106, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "learning_rate": 5.4689655172413805e-06, | |
| "loss": 0.0074, | |
| "step": 7075 | |
| }, | |
| { | |
| "epoch": 7.87, | |
| "learning_rate": 5.4517241379310345e-06, | |
| "loss": 0.0044, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 7.9, | |
| "learning_rate": 5.43448275862069e-06, | |
| "loss": 0.0065, | |
| "step": 7125 | |
| }, | |
| { | |
| "epoch": 7.93, | |
| "learning_rate": 5.417241379310345e-06, | |
| "loss": 0.0072, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "learning_rate": 5.400000000000001e-06, | |
| "loss": 0.0049, | |
| "step": 7175 | |
| }, | |
| { | |
| "epoch": 7.98, | |
| "learning_rate": 5.382758620689656e-06, | |
| "loss": 0.0041, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 8.01, | |
| "learning_rate": 5.365517241379311e-06, | |
| "loss": 0.0045, | |
| "step": 7225 | |
| }, | |
| { | |
| "epoch": 8.04, | |
| "learning_rate": 5.3482758620689654e-06, | |
| "loss": 0.0013, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 8.07, | |
| "learning_rate": 5.331034482758621e-06, | |
| "loss": 0.0011, | |
| "step": 7275 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "learning_rate": 5.313793103448276e-06, | |
| "loss": 0.0015, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 8.12, | |
| "learning_rate": 5.296551724137932e-06, | |
| "loss": 0.0026, | |
| "step": 7325 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "learning_rate": 5.279310344827587e-06, | |
| "loss": 0.0082, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 5.2620689655172415e-06, | |
| "loss": 0.0022, | |
| "step": 7375 | |
| }, | |
| { | |
| "epoch": 8.2, | |
| "learning_rate": 5.244827586206896e-06, | |
| "loss": 0.0026, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "learning_rate": 5.227586206896552e-06, | |
| "loss": 0.0034, | |
| "step": 7425 | |
| }, | |
| { | |
| "epoch": 8.26, | |
| "learning_rate": 5.210344827586208e-06, | |
| "loss": 0.0017, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 8.29, | |
| "learning_rate": 5.193103448275863e-06, | |
| "loss": 0.0042, | |
| "step": 7475 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "learning_rate": 5.175862068965518e-06, | |
| "loss": 0.002, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 8.34, | |
| "learning_rate": 5.158620689655172e-06, | |
| "loss": 0.0032, | |
| "step": 7525 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "learning_rate": 5.141379310344828e-06, | |
| "loss": 0.0027, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "learning_rate": 5.124137931034483e-06, | |
| "loss": 0.0013, | |
| "step": 7575 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "learning_rate": 5.106896551724139e-06, | |
| "loss": 0.0026, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 8.45, | |
| "learning_rate": 5.0896551724137936e-06, | |
| "loss": 0.0007, | |
| "step": 7625 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "learning_rate": 5.072413793103449e-06, | |
| "loss": 0.0014, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 8.51, | |
| "learning_rate": 5.055172413793103e-06, | |
| "loss": 0.0039, | |
| "step": 7675 | |
| }, | |
| { | |
| "epoch": 8.54, | |
| "learning_rate": 5.037931034482759e-06, | |
| "loss": 0.0012, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 8.56, | |
| "learning_rate": 5.020689655172414e-06, | |
| "loss": 0.0004, | |
| "step": 7725 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "learning_rate": 5.00344827586207e-06, | |
| "loss": 0.0009, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "learning_rate": 4.9862068965517245e-06, | |
| "loss": 0.0044, | |
| "step": 7775 | |
| }, | |
| { | |
| "epoch": 8.65, | |
| "learning_rate": 4.968965517241379e-06, | |
| "loss": 0.0026, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 8.68, | |
| "learning_rate": 4.951724137931035e-06, | |
| "loss": 0.0011, | |
| "step": 7825 | |
| }, | |
| { | |
| "epoch": 8.7, | |
| "learning_rate": 4.93448275862069e-06, | |
| "loss": 0.005, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 8.73, | |
| "learning_rate": 4.917241379310345e-06, | |
| "loss": 0.0014, | |
| "step": 7875 | |
| }, | |
| { | |
| "epoch": 8.76, | |
| "learning_rate": 4.9000000000000005e-06, | |
| "loss": 0.0023, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 8.79, | |
| "learning_rate": 4.882758620689655e-06, | |
| "loss": 0.0009, | |
| "step": 7925 | |
| }, | |
| { | |
| "epoch": 8.81, | |
| "learning_rate": 4.86551724137931e-06, | |
| "loss": 0.0029, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 8.84, | |
| "learning_rate": 4.848275862068966e-06, | |
| "loss": 0.0014, | |
| "step": 7975 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 4.831034482758621e-06, | |
| "loss": 0.0045, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "eval_loss": 0.43131473660469055, | |
| "eval_runtime": 2955.3959, | |
| "eval_samples_per_second": 0.577, | |
| "eval_steps_per_second": 0.072, | |
| "eval_wer": 96.49990666417771, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 8.9, | |
| "learning_rate": 4.813793103448276e-06, | |
| "loss": 0.0098, | |
| "step": 8025 | |
| }, | |
| { | |
| "epoch": 8.92, | |
| "learning_rate": 4.7965517241379314e-06, | |
| "loss": 0.001, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 8.95, | |
| "learning_rate": 4.779310344827587e-06, | |
| "loss": 0.0032, | |
| "step": 8075 | |
| }, | |
| { | |
| "epoch": 8.98, | |
| "learning_rate": 4.762068965517242e-06, | |
| "loss": 0.0011, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 9.01, | |
| "learning_rate": 4.744827586206897e-06, | |
| "loss": 0.0028, | |
| "step": 8125 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "learning_rate": 4.727586206896553e-06, | |
| "loss": 0.0006, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 9.06, | |
| "learning_rate": 4.7103448275862075e-06, | |
| "loss": 0.0003, | |
| "step": 8175 | |
| }, | |
| { | |
| "epoch": 9.09, | |
| "learning_rate": 4.693103448275862e-06, | |
| "loss": 0.0006, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 9.12, | |
| "learning_rate": 4.675862068965517e-06, | |
| "loss": 0.0019, | |
| "step": 8225 | |
| }, | |
| { | |
| "epoch": 9.15, | |
| "learning_rate": 4.658620689655173e-06, | |
| "loss": 0.0011, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 9.17, | |
| "learning_rate": 4.641379310344828e-06, | |
| "loss": 0.0012, | |
| "step": 8275 | |
| }, | |
| { | |
| "epoch": 9.2, | |
| "learning_rate": 4.624137931034483e-06, | |
| "loss": 0.0018, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 9.23, | |
| "learning_rate": 4.606896551724138e-06, | |
| "loss": 0.0009, | |
| "step": 8325 | |
| }, | |
| { | |
| "epoch": 9.26, | |
| "learning_rate": 4.589655172413793e-06, | |
| "loss": 0.001, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 9.28, | |
| "learning_rate": 4.572413793103448e-06, | |
| "loss": 0.0022, | |
| "step": 8375 | |
| }, | |
| { | |
| "epoch": 9.31, | |
| "learning_rate": 4.555172413793104e-06, | |
| "loss": 0.0004, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 9.34, | |
| "learning_rate": 4.537931034482759e-06, | |
| "loss": 0.0006, | |
| "step": 8425 | |
| }, | |
| { | |
| "epoch": 9.37, | |
| "learning_rate": 4.5206896551724144e-06, | |
| "loss": 0.0028, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 9.4, | |
| "learning_rate": 4.503448275862069e-06, | |
| "loss": 0.0007, | |
| "step": 8475 | |
| }, | |
| { | |
| "epoch": 9.42, | |
| "learning_rate": 4.486896551724138e-06, | |
| "loss": 0.0066, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 9.45, | |
| "learning_rate": 4.4696551724137936e-06, | |
| "loss": 0.0026, | |
| "step": 8525 | |
| }, | |
| { | |
| "epoch": 9.48, | |
| "learning_rate": 4.452413793103449e-06, | |
| "loss": 0.0012, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 9.51, | |
| "learning_rate": 4.435172413793104e-06, | |
| "loss": 0.0007, | |
| "step": 8575 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "learning_rate": 4.417931034482759e-06, | |
| "loss": 0.0012, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 9.56, | |
| "learning_rate": 4.400689655172414e-06, | |
| "loss": 0.0004, | |
| "step": 8625 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "learning_rate": 4.38344827586207e-06, | |
| "loss": 0.0009, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 9.62, | |
| "learning_rate": 4.3662068965517245e-06, | |
| "loss": 0.0009, | |
| "step": 8675 | |
| }, | |
| { | |
| "epoch": 9.65, | |
| "learning_rate": 4.348965517241379e-06, | |
| "loss": 0.0015, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 9.67, | |
| "learning_rate": 4.331724137931035e-06, | |
| "loss": 0.003, | |
| "step": 8725 | |
| }, | |
| { | |
| "epoch": 9.7, | |
| "learning_rate": 4.31448275862069e-06, | |
| "loss": 0.0004, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 9.73, | |
| "learning_rate": 4.297241379310345e-06, | |
| "loss": 0.001, | |
| "step": 8775 | |
| }, | |
| { | |
| "epoch": 9.76, | |
| "learning_rate": 4.2800000000000005e-06, | |
| "loss": 0.0016, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 9.78, | |
| "learning_rate": 4.262758620689655e-06, | |
| "loss": 0.0005, | |
| "step": 8825 | |
| }, | |
| { | |
| "epoch": 9.81, | |
| "learning_rate": 4.24551724137931e-06, | |
| "loss": 0.0006, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 9.84, | |
| "learning_rate": 4.228275862068966e-06, | |
| "loss": 0.0031, | |
| "step": 8875 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "learning_rate": 4.211034482758621e-06, | |
| "loss": 0.0005, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 9.89, | |
| "learning_rate": 4.193793103448276e-06, | |
| "loss": 0.0003, | |
| "step": 8925 | |
| }, | |
| { | |
| "epoch": 9.92, | |
| "learning_rate": 4.1765517241379314e-06, | |
| "loss": 0.0034, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 9.95, | |
| "learning_rate": 4.159310344827587e-06, | |
| "loss": 0.0011, | |
| "step": 8975 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "learning_rate": 4.142068965517242e-06, | |
| "loss": 0.0004, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "eval_loss": 0.44217491149902344, | |
| "eval_runtime": 3046.7205, | |
| "eval_samples_per_second": 0.559, | |
| "eval_steps_per_second": 0.07, | |
| "eval_wer": 83.05021467239126, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 10.01, | |
| "learning_rate": 4.124827586206897e-06, | |
| "loss": 0.0005, | |
| "step": 9025 | |
| }, | |
| { | |
| "epoch": 10.03, | |
| "learning_rate": 4.107586206896552e-06, | |
| "loss": 0.0003, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 10.06, | |
| "learning_rate": 4.0903448275862075e-06, | |
| "loss": 0.0023, | |
| "step": 9075 | |
| }, | |
| { | |
| "epoch": 10.09, | |
| "learning_rate": 4.073103448275862e-06, | |
| "loss": 0.0002, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 10.12, | |
| "learning_rate": 4.055862068965517e-06, | |
| "loss": 0.0002, | |
| "step": 9125 | |
| }, | |
| { | |
| "epoch": 10.14, | |
| "learning_rate": 4.038620689655173e-06, | |
| "loss": 0.0002, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 10.17, | |
| "learning_rate": 4.021379310344828e-06, | |
| "loss": 0.0003, | |
| "step": 9175 | |
| }, | |
| { | |
| "epoch": 10.2, | |
| "learning_rate": 4.004137931034483e-06, | |
| "loss": 0.0002, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 10.23, | |
| "learning_rate": 3.986896551724138e-06, | |
| "loss": 0.0046, | |
| "step": 9225 | |
| }, | |
| { | |
| "epoch": 10.25, | |
| "learning_rate": 3.969655172413793e-06, | |
| "loss": 0.0003, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 10.28, | |
| "learning_rate": 3.952413793103448e-06, | |
| "loss": 0.0002, | |
| "step": 9275 | |
| }, | |
| { | |
| "epoch": 10.31, | |
| "learning_rate": 3.935172413793104e-06, | |
| "loss": 0.0001, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 10.34, | |
| "learning_rate": 3.917931034482759e-06, | |
| "loss": 0.0002, | |
| "step": 9325 | |
| }, | |
| { | |
| "epoch": 10.37, | |
| "learning_rate": 3.9006896551724144e-06, | |
| "loss": 0.0002, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 10.39, | |
| "learning_rate": 3.883448275862069e-06, | |
| "loss": 0.0049, | |
| "step": 9375 | |
| }, | |
| { | |
| "epoch": 10.42, | |
| "learning_rate": 3.866206896551725e-06, | |
| "loss": 0.0005, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 10.45, | |
| "learning_rate": 3.84896551724138e-06, | |
| "loss": 0.0011, | |
| "step": 9425 | |
| }, | |
| { | |
| "epoch": 10.48, | |
| "learning_rate": 3.831724137931035e-06, | |
| "loss": 0.0007, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 10.5, | |
| "learning_rate": 3.81448275862069e-06, | |
| "loss": 0.0002, | |
| "step": 9475 | |
| }, | |
| { | |
| "epoch": 10.53, | |
| "learning_rate": 3.7972413793103454e-06, | |
| "loss": 0.0002, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 10.56, | |
| "learning_rate": 3.7800000000000002e-06, | |
| "loss": 0.0016, | |
| "step": 9525 | |
| }, | |
| { | |
| "epoch": 10.59, | |
| "learning_rate": 3.7627586206896555e-06, | |
| "loss": 0.0012, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 10.62, | |
| "learning_rate": 3.745517241379311e-06, | |
| "loss": 0.0028, | |
| "step": 9575 | |
| }, | |
| { | |
| "epoch": 10.64, | |
| "learning_rate": 3.7282758620689657e-06, | |
| "loss": 0.0002, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 10.67, | |
| "learning_rate": 3.711034482758621e-06, | |
| "loss": 0.0004, | |
| "step": 9625 | |
| }, | |
| { | |
| "epoch": 10.7, | |
| "learning_rate": 3.693793103448276e-06, | |
| "loss": 0.0002, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 10.73, | |
| "learning_rate": 3.676551724137931e-06, | |
| "loss": 0.0002, | |
| "step": 9675 | |
| }, | |
| { | |
| "epoch": 10.75, | |
| "learning_rate": 3.6593103448275864e-06, | |
| "loss": 0.0002, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 10.78, | |
| "learning_rate": 3.6420689655172413e-06, | |
| "loss": 0.0015, | |
| "step": 9725 | |
| }, | |
| { | |
| "epoch": 10.81, | |
| "learning_rate": 3.624827586206897e-06, | |
| "loss": 0.0002, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 10.84, | |
| "learning_rate": 3.6075862068965523e-06, | |
| "loss": 0.0005, | |
| "step": 9775 | |
| }, | |
| { | |
| "epoch": 10.86, | |
| "learning_rate": 3.590344827586207e-06, | |
| "loss": 0.0003, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 10.89, | |
| "learning_rate": 3.5731034482758625e-06, | |
| "loss": 0.0004, | |
| "step": 9825 | |
| }, | |
| { | |
| "epoch": 10.92, | |
| "learning_rate": 3.5558620689655178e-06, | |
| "loss": 0.002, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 10.95, | |
| "learning_rate": 3.5386206896551726e-06, | |
| "loss": 0.0002, | |
| "step": 9875 | |
| }, | |
| { | |
| "epoch": 10.98, | |
| "learning_rate": 3.521379310344828e-06, | |
| "loss": 0.0002, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 11.0, | |
| "learning_rate": 3.5041379310344832e-06, | |
| "loss": 0.0004, | |
| "step": 9925 | |
| }, | |
| { | |
| "epoch": 11.03, | |
| "learning_rate": 3.486896551724138e-06, | |
| "loss": 0.0013, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 11.06, | |
| "learning_rate": 3.4696551724137934e-06, | |
| "loss": 0.0008, | |
| "step": 9975 | |
| }, | |
| { | |
| "epoch": 11.09, | |
| "learning_rate": 3.4524137931034487e-06, | |
| "loss": 0.0002, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 11.09, | |
| "eval_loss": 0.45697087049484253, | |
| "eval_runtime": 3012.0295, | |
| "eval_samples_per_second": 0.566, | |
| "eval_steps_per_second": 0.071, | |
| "eval_wer": 78.59809594922531, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 11.11, | |
| "learning_rate": 3.4351724137931036e-06, | |
| "loss": 0.0006, | |
| "step": 10025 | |
| }, | |
| { | |
| "epoch": 11.14, | |
| "learning_rate": 3.417931034482759e-06, | |
| "loss": 0.0002, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 11.17, | |
| "learning_rate": 3.4006896551724137e-06, | |
| "loss": 0.0003, | |
| "step": 10075 | |
| }, | |
| { | |
| "epoch": 11.2, | |
| "learning_rate": 3.383448275862069e-06, | |
| "loss": 0.0001, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 11.23, | |
| "learning_rate": 3.3662068965517243e-06, | |
| "loss": 0.0001, | |
| "step": 10125 | |
| }, | |
| { | |
| "epoch": 11.25, | |
| "learning_rate": 3.34896551724138e-06, | |
| "loss": 0.0001, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 11.28, | |
| "learning_rate": 3.331724137931035e-06, | |
| "loss": 0.0007, | |
| "step": 10175 | |
| }, | |
| { | |
| "epoch": 11.31, | |
| "learning_rate": 3.31448275862069e-06, | |
| "loss": 0.0001, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 11.34, | |
| "learning_rate": 3.297241379310345e-06, | |
| "loss": 0.0015, | |
| "step": 10225 | |
| }, | |
| { | |
| "epoch": 11.36, | |
| "learning_rate": 3.2800000000000004e-06, | |
| "loss": 0.0001, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 11.39, | |
| "learning_rate": 3.2627586206896557e-06, | |
| "loss": 0.0003, | |
| "step": 10275 | |
| }, | |
| { | |
| "epoch": 11.42, | |
| "learning_rate": 3.2455172413793105e-06, | |
| "loss": 0.0004, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 11.45, | |
| "learning_rate": 3.228275862068966e-06, | |
| "loss": 0.0024, | |
| "step": 10325 | |
| }, | |
| { | |
| "epoch": 11.47, | |
| "learning_rate": 3.211034482758621e-06, | |
| "loss": 0.0018, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 11.5, | |
| "learning_rate": 3.193793103448276e-06, | |
| "loss": 0.0001, | |
| "step": 10375 | |
| }, | |
| { | |
| "epoch": 11.53, | |
| "learning_rate": 3.1765517241379313e-06, | |
| "loss": 0.0002, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 11.56, | |
| "learning_rate": 3.1593103448275866e-06, | |
| "loss": 0.0004, | |
| "step": 10425 | |
| }, | |
| { | |
| "epoch": 11.59, | |
| "learning_rate": 3.1420689655172414e-06, | |
| "loss": 0.0004, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 11.61, | |
| "learning_rate": 3.1248275862068967e-06, | |
| "loss": 0.0019, | |
| "step": 10475 | |
| }, | |
| { | |
| "epoch": 11.64, | |
| "learning_rate": 3.1075862068965516e-06, | |
| "loss": 0.0002, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 11.67, | |
| "learning_rate": 3.091034482758621e-06, | |
| "loss": 0.0012, | |
| "step": 10525 | |
| }, | |
| { | |
| "epoch": 11.7, | |
| "learning_rate": 3.073793103448276e-06, | |
| "loss": 0.0001, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 11.72, | |
| "learning_rate": 3.056551724137931e-06, | |
| "loss": 0.0002, | |
| "step": 10575 | |
| }, | |
| { | |
| "epoch": 11.75, | |
| "learning_rate": 3.0393103448275864e-06, | |
| "loss": 0.0002, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 11.78, | |
| "learning_rate": 3.0220689655172413e-06, | |
| "loss": 0.0034, | |
| "step": 10625 | |
| }, | |
| { | |
| "epoch": 11.81, | |
| "learning_rate": 3.004827586206897e-06, | |
| "loss": 0.0002, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 11.83, | |
| "learning_rate": 2.9875862068965523e-06, | |
| "loss": 0.0001, | |
| "step": 10675 | |
| }, | |
| { | |
| "epoch": 11.86, | |
| "learning_rate": 2.970344827586207e-06, | |
| "loss": 0.0032, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 11.89, | |
| "learning_rate": 2.9531034482758625e-06, | |
| "loss": 0.0011, | |
| "step": 10725 | |
| }, | |
| { | |
| "epoch": 11.92, | |
| "learning_rate": 2.9358620689655178e-06, | |
| "loss": 0.0002, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 11.95, | |
| "learning_rate": 2.9186206896551727e-06, | |
| "loss": 0.0014, | |
| "step": 10775 | |
| }, | |
| { | |
| "epoch": 11.97, | |
| "learning_rate": 2.901379310344828e-06, | |
| "loss": 0.0024, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 12.0, | |
| "learning_rate": 2.884137931034483e-06, | |
| "loss": 0.0002, | |
| "step": 10825 | |
| }, | |
| { | |
| "epoch": 12.03, | |
| "learning_rate": 2.866896551724138e-06, | |
| "loss": 0.0008, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 12.06, | |
| "learning_rate": 2.8496551724137934e-06, | |
| "loss": 0.0002, | |
| "step": 10875 | |
| }, | |
| { | |
| "epoch": 12.08, | |
| "learning_rate": 2.8324137931034483e-06, | |
| "loss": 0.0001, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 12.11, | |
| "learning_rate": 2.8151724137931036e-06, | |
| "loss": 0.0003, | |
| "step": 10925 | |
| }, | |
| { | |
| "epoch": 12.14, | |
| "learning_rate": 2.797931034482759e-06, | |
| "loss": 0.0043, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 12.17, | |
| "learning_rate": 2.7806896551724137e-06, | |
| "loss": 0.0011, | |
| "step": 10975 | |
| }, | |
| { | |
| "epoch": 12.2, | |
| "learning_rate": 2.763448275862069e-06, | |
| "loss": 0.0001, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 12.2, | |
| "eval_loss": 0.47608184814453125, | |
| "eval_runtime": 3039.0267, | |
| "eval_samples_per_second": 0.561, | |
| "eval_steps_per_second": 0.07, | |
| "eval_wer": 91.78644763860369, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 12.22, | |
| "learning_rate": 2.7462068965517243e-06, | |
| "loss": 0.0019, | |
| "step": 11025 | |
| }, | |
| { | |
| "epoch": 12.25, | |
| "learning_rate": 2.728965517241379e-06, | |
| "loss": 0.0002, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 12.28, | |
| "learning_rate": 2.711724137931035e-06, | |
| "loss": 0.003, | |
| "step": 11075 | |
| }, | |
| { | |
| "epoch": 12.31, | |
| "learning_rate": 2.69448275862069e-06, | |
| "loss": 0.0002, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 12.33, | |
| "learning_rate": 2.677241379310345e-06, | |
| "loss": 0.0018, | |
| "step": 11125 | |
| }, | |
| { | |
| "epoch": 12.36, | |
| "learning_rate": 2.6600000000000004e-06, | |
| "loss": 0.0079, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 12.39, | |
| "learning_rate": 2.6427586206896557e-06, | |
| "loss": 0.0001, | |
| "step": 11175 | |
| }, | |
| { | |
| "epoch": 12.42, | |
| "learning_rate": 2.6255172413793105e-06, | |
| "loss": 0.0046, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 12.44, | |
| "learning_rate": 2.608275862068966e-06, | |
| "loss": 0.0008, | |
| "step": 11225 | |
| }, | |
| { | |
| "epoch": 12.47, | |
| "learning_rate": 2.5910344827586207e-06, | |
| "loss": 0.001, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 12.5, | |
| "learning_rate": 2.573793103448276e-06, | |
| "loss": 0.001, | |
| "step": 11275 | |
| }, | |
| { | |
| "epoch": 12.53, | |
| "learning_rate": 2.5565517241379313e-06, | |
| "loss": 0.0005, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 12.56, | |
| "learning_rate": 2.539310344827586e-06, | |
| "loss": 0.0008, | |
| "step": 11325 | |
| }, | |
| { | |
| "epoch": 12.58, | |
| "learning_rate": 2.5220689655172414e-06, | |
| "loss": 0.0002, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 12.61, | |
| "learning_rate": 2.5048275862068967e-06, | |
| "loss": 0.0054, | |
| "step": 11375 | |
| }, | |
| { | |
| "epoch": 12.64, | |
| "learning_rate": 2.487586206896552e-06, | |
| "loss": 0.0004, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 12.67, | |
| "learning_rate": 2.4703448275862073e-06, | |
| "loss": 0.0001, | |
| "step": 11425 | |
| }, | |
| { | |
| "epoch": 12.69, | |
| "learning_rate": 2.453103448275862e-06, | |
| "loss": 0.0001, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 12.72, | |
| "learning_rate": 2.4358620689655175e-06, | |
| "loss": 0.0001, | |
| "step": 11475 | |
| }, | |
| { | |
| "epoch": 12.75, | |
| "learning_rate": 2.4186206896551724e-06, | |
| "loss": 0.0001, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 12.78, | |
| "learning_rate": 2.4013793103448277e-06, | |
| "loss": 0.0001, | |
| "step": 11525 | |
| }, | |
| { | |
| "epoch": 12.8, | |
| "learning_rate": 2.384137931034483e-06, | |
| "loss": 0.0001, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 12.83, | |
| "learning_rate": 2.3668965517241382e-06, | |
| "loss": 0.0014, | |
| "step": 11575 | |
| }, | |
| { | |
| "epoch": 12.86, | |
| "learning_rate": 2.3496551724137935e-06, | |
| "loss": 0.0012, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 12.89, | |
| "learning_rate": 2.3324137931034484e-06, | |
| "loss": 0.0001, | |
| "step": 11625 | |
| }, | |
| { | |
| "epoch": 12.92, | |
| "learning_rate": 2.3151724137931037e-06, | |
| "loss": 0.0001, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 12.94, | |
| "learning_rate": 2.297931034482759e-06, | |
| "loss": 0.0004, | |
| "step": 11675 | |
| }, | |
| { | |
| "epoch": 12.97, | |
| "learning_rate": 2.280689655172414e-06, | |
| "loss": 0.0001, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 13.0, | |
| "learning_rate": 2.263448275862069e-06, | |
| "loss": 0.0001, | |
| "step": 11725 | |
| }, | |
| { | |
| "epoch": 13.03, | |
| "learning_rate": 2.246206896551724e-06, | |
| "loss": 0.0001, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 13.05, | |
| "learning_rate": 2.2289655172413797e-06, | |
| "loss": 0.0001, | |
| "step": 11775 | |
| }, | |
| { | |
| "epoch": 13.08, | |
| "learning_rate": 2.2117241379310346e-06, | |
| "loss": 0.0001, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 13.11, | |
| "learning_rate": 2.19448275862069e-06, | |
| "loss": 0.0001, | |
| "step": 11825 | |
| }, | |
| { | |
| "epoch": 13.14, | |
| "learning_rate": 2.177241379310345e-06, | |
| "loss": 0.0001, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 13.17, | |
| "learning_rate": 2.16e-06, | |
| "loss": 0.0001, | |
| "step": 11875 | |
| }, | |
| { | |
| "epoch": 13.19, | |
| "learning_rate": 2.1427586206896554e-06, | |
| "loss": 0.0001, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 13.22, | |
| "learning_rate": 2.1255172413793102e-06, | |
| "loss": 0.0001, | |
| "step": 11925 | |
| }, | |
| { | |
| "epoch": 13.25, | |
| "learning_rate": 2.1082758620689655e-06, | |
| "loss": 0.0001, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 13.28, | |
| "learning_rate": 2.091034482758621e-06, | |
| "loss": 0.0001, | |
| "step": 11975 | |
| }, | |
| { | |
| "epoch": 13.3, | |
| "learning_rate": 2.073793103448276e-06, | |
| "loss": 0.0002, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 13.3, | |
| "eval_loss": 0.4738454222679138, | |
| "eval_runtime": 2933.9692, | |
| "eval_samples_per_second": 0.581, | |
| "eval_steps_per_second": 0.073, | |
| "eval_wer": 62.61900317341795, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 13.33, | |
| "learning_rate": 2.0565517241379314e-06, | |
| "loss": 0.0001, | |
| "step": 12025 | |
| }, | |
| { | |
| "epoch": 13.36, | |
| "learning_rate": 2.0393103448275863e-06, | |
| "loss": 0.0004, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 13.39, | |
| "learning_rate": 2.0220689655172416e-06, | |
| "loss": 0.0001, | |
| "step": 12075 | |
| }, | |
| { | |
| "epoch": 13.41, | |
| "learning_rate": 2.004827586206897e-06, | |
| "loss": 0.0018, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 13.44, | |
| "learning_rate": 1.9875862068965517e-06, | |
| "loss": 0.0001, | |
| "step": 12125 | |
| }, | |
| { | |
| "epoch": 13.47, | |
| "learning_rate": 1.970344827586207e-06, | |
| "loss": 0.0001, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 13.5, | |
| "learning_rate": 1.9531034482758623e-06, | |
| "loss": 0.0001, | |
| "step": 12175 | |
| }, | |
| { | |
| "epoch": 13.53, | |
| "learning_rate": 1.9358620689655176e-06, | |
| "loss": 0.002, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 13.55, | |
| "learning_rate": 1.9186206896551725e-06, | |
| "loss": 0.0002, | |
| "step": 12225 | |
| }, | |
| { | |
| "epoch": 13.58, | |
| "learning_rate": 1.9013793103448278e-06, | |
| "loss": 0.0047, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 13.61, | |
| "learning_rate": 1.8841379310344829e-06, | |
| "loss": 0.0001, | |
| "step": 12275 | |
| }, | |
| { | |
| "epoch": 13.64, | |
| "learning_rate": 1.866896551724138e-06, | |
| "loss": 0.0001, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 13.66, | |
| "learning_rate": 1.8496551724137932e-06, | |
| "loss": 0.0001, | |
| "step": 12325 | |
| }, | |
| { | |
| "epoch": 13.69, | |
| "learning_rate": 1.8324137931034483e-06, | |
| "loss": 0.0001, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 13.72, | |
| "learning_rate": 1.8151724137931036e-06, | |
| "loss": 0.0012, | |
| "step": 12375 | |
| }, | |
| { | |
| "epoch": 13.75, | |
| "learning_rate": 1.797931034482759e-06, | |
| "loss": 0.0006, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 13.77, | |
| "learning_rate": 1.780689655172414e-06, | |
| "loss": 0.0001, | |
| "step": 12425 | |
| }, | |
| { | |
| "epoch": 13.8, | |
| "learning_rate": 1.763448275862069e-06, | |
| "loss": 0.0004, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 13.83, | |
| "learning_rate": 1.7462068965517244e-06, | |
| "loss": 0.0005, | |
| "step": 12475 | |
| }, | |
| { | |
| "epoch": 13.86, | |
| "learning_rate": 1.7289655172413794e-06, | |
| "loss": 0.0002, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 13.89, | |
| "learning_rate": 1.7117241379310345e-06, | |
| "loss": 0.0001, | |
| "step": 12525 | |
| }, | |
| { | |
| "epoch": 13.91, | |
| "learning_rate": 1.6944827586206896e-06, | |
| "loss": 0.0001, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 13.94, | |
| "learning_rate": 1.6772413793103451e-06, | |
| "loss": 0.0001, | |
| "step": 12575 | |
| }, | |
| { | |
| "epoch": 13.97, | |
| "learning_rate": 1.6600000000000002e-06, | |
| "loss": 0.0001, | |
| "step": 12600 | |
| }, | |
| { | |
| "epoch": 14.0, | |
| "learning_rate": 1.6427586206896553e-06, | |
| "loss": 0.0001, | |
| "step": 12625 | |
| }, | |
| { | |
| "epoch": 14.02, | |
| "learning_rate": 1.6255172413793106e-06, | |
| "loss": 0.0001, | |
| "step": 12650 | |
| }, | |
| { | |
| "epoch": 14.05, | |
| "learning_rate": 1.6082758620689657e-06, | |
| "loss": 0.0001, | |
| "step": 12675 | |
| }, | |
| { | |
| "epoch": 14.08, | |
| "learning_rate": 1.5910344827586207e-06, | |
| "loss": 0.0001, | |
| "step": 12700 | |
| }, | |
| { | |
| "epoch": 14.11, | |
| "learning_rate": 1.5737931034482758e-06, | |
| "loss": 0.0001, | |
| "step": 12725 | |
| }, | |
| { | |
| "epoch": 14.14, | |
| "learning_rate": 1.5565517241379311e-06, | |
| "loss": 0.0001, | |
| "step": 12750 | |
| }, | |
| { | |
| "epoch": 14.16, | |
| "learning_rate": 1.5393103448275864e-06, | |
| "loss": 0.0001, | |
| "step": 12775 | |
| }, | |
| { | |
| "epoch": 14.19, | |
| "learning_rate": 1.5220689655172415e-06, | |
| "loss": 0.0001, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 14.22, | |
| "learning_rate": 1.5048275862068968e-06, | |
| "loss": 0.0001, | |
| "step": 12825 | |
| }, | |
| { | |
| "epoch": 14.25, | |
| "learning_rate": 1.4875862068965519e-06, | |
| "loss": 0.0001, | |
| "step": 12850 | |
| }, | |
| { | |
| "epoch": 14.27, | |
| "learning_rate": 1.470344827586207e-06, | |
| "loss": 0.0001, | |
| "step": 12875 | |
| }, | |
| { | |
| "epoch": 14.3, | |
| "learning_rate": 1.4531034482758622e-06, | |
| "loss": 0.0001, | |
| "step": 12900 | |
| }, | |
| { | |
| "epoch": 14.33, | |
| "learning_rate": 1.4358620689655173e-06, | |
| "loss": 0.0001, | |
| "step": 12925 | |
| }, | |
| { | |
| "epoch": 14.36, | |
| "learning_rate": 1.4186206896551724e-06, | |
| "loss": 0.0001, | |
| "step": 12950 | |
| }, | |
| { | |
| "epoch": 14.38, | |
| "learning_rate": 1.401379310344828e-06, | |
| "loss": 0.0001, | |
| "step": 12975 | |
| }, | |
| { | |
| "epoch": 14.41, | |
| "learning_rate": 1.384137931034483e-06, | |
| "loss": 0.0001, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 14.41, | |
| "eval_loss": 0.478272408246994, | |
| "eval_runtime": 2940.524, | |
| "eval_samples_per_second": 0.579, | |
| "eval_steps_per_second": 0.072, | |
| "eval_wer": 56.720179204778795, | |
| "step": 13000 | |
| } | |
| ], | |
| "max_steps": 15000, | |
| "num_train_epochs": 17, | |
| "total_flos": 1.500240062103552e+19, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |