{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 228,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0043859649122807015,
      "grad_norm": 49.592716217041016,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 1.2955,
      "step": 1
    },
    {
      "epoch": 0.008771929824561403,
      "grad_norm": 52.511619567871094,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.3459,
      "step": 2
    },
    {
      "epoch": 0.013157894736842105,
      "grad_norm": 23.82323455810547,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 1.2305,
      "step": 3
    },
    {
      "epoch": 0.017543859649122806,
      "grad_norm": 11.829014778137207,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 1.0947,
      "step": 4
    },
    {
      "epoch": 0.021929824561403508,
      "grad_norm": 20.558698654174805,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 1.0768,
      "step": 5
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 14.469958305358887,
      "learning_rate": 8.571428571428571e-06,
      "loss": 1.0448,
      "step": 6
    },
    {
      "epoch": 0.03070175438596491,
      "grad_norm": 7.883849620819092,
      "learning_rate": 1e-05,
      "loss": 1.0738,
      "step": 7
    },
    {
      "epoch": 0.03508771929824561,
      "grad_norm": 7.827476978302002,
      "learning_rate": 9.999494817970498e-06,
      "loss": 1.0353,
      "step": 8
    },
    {
      "epoch": 0.039473684210526314,
      "grad_norm": 8.651590347290039,
      "learning_rate": 9.997979373965542e-06,
      "loss": 0.9942,
      "step": 9
    },
    {
      "epoch": 0.043859649122807015,
      "grad_norm": 6.32966423034668,
      "learning_rate": 9.995453974215164e-06,
      "loss": 1.0467,
      "step": 10
    },
    {
      "epoch": 0.04824561403508772,
      "grad_norm": 5.549691677093506,
      "learning_rate": 9.991919129033994e-06,
      "loss": 0.9998,
      "step": 11
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 5.89638614654541,
      "learning_rate": 9.987375552718133e-06,
      "loss": 0.9997,
      "step": 12
    },
    {
      "epoch": 0.05701754385964912,
      "grad_norm": 6.868354320526123,
      "learning_rate": 9.981824163400827e-06,
      "loss": 1.0602,
      "step": 13
    },
    {
      "epoch": 0.06140350877192982,
      "grad_norm": 5.4902496337890625,
      "learning_rate": 9.975266082866923e-06,
      "loss": 1.043,
      "step": 14
    },
    {
      "epoch": 0.06578947368421052,
      "grad_norm": 5.782011032104492,
      "learning_rate": 9.967702636326195e-06,
      "loss": 1.0331,
      "step": 15
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 6.002492427825928,
      "learning_rate": 9.959135352145552e-06,
      "loss": 1.0376,
      "step": 16
    },
    {
      "epoch": 0.07456140350877193,
      "grad_norm": 5.092071056365967,
      "learning_rate": 9.9495659615402e-06,
      "loss": 1.0244,
      "step": 17
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 5.242514133453369,
      "learning_rate": 9.938996398223802e-06,
      "loss": 0.9812,
      "step": 18
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 5.504293918609619,
      "learning_rate": 9.927428798017738e-06,
      "loss": 1.0288,
      "step": 19
    },
    {
      "epoch": 0.08771929824561403,
      "grad_norm": 5.18280029296875,
      "learning_rate": 9.91486549841951e-06,
      "loss": 1.0403,
      "step": 20
    },
    {
      "epoch": 0.09210526315789473,
      "grad_norm": 5.409468173980713,
      "learning_rate": 9.901309038130392e-06,
      "loss": 1.046,
      "step": 21
    },
    {
      "epoch": 0.09649122807017543,
      "grad_norm": 4.993797779083252,
      "learning_rate": 9.886762156542428e-06,
      "loss": 1.0609,
      "step": 22
    },
    {
      "epoch": 0.10087719298245613,
      "grad_norm": 4.722639083862305,
      "learning_rate": 9.871227793184893e-06,
      "loss": 1.03,
      "step": 23
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 5.58522367477417,
      "learning_rate": 9.854709087130261e-06,
      "loss": 1.0388,
      "step": 24
    },
    {
      "epoch": 0.10964912280701754,
      "grad_norm": 5.170425891876221,
      "learning_rate": 9.837209376359918e-06,
      "loss": 1.0588,
      "step": 25
    },
    {
      "epoch": 0.11403508771929824,
      "grad_norm": 5.433144569396973,
      "learning_rate": 9.81873219708962e-06,
      "loss": 1.0688,
      "step": 26
    },
    {
      "epoch": 0.11842105263157894,
      "grad_norm": 4.71676778793335,
      "learning_rate": 9.79928128305494e-06,
      "loss": 1.0311,
      "step": 27
    },
    {
      "epoch": 0.12280701754385964,
      "grad_norm": 4.437475681304932,
      "learning_rate": 9.778860564756769e-06,
      "loss": 1.0086,
      "step": 28
    },
    {
      "epoch": 0.12719298245614036,
      "grad_norm": 5.034616947174072,
      "learning_rate": 9.757474168667072e-06,
      "loss": 1.0627,
      "step": 29
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 4.68194055557251,
      "learning_rate": 9.73512641639504e-06,
      "loss": 1.0349,
      "step": 30
    },
    {
      "epoch": 0.13596491228070176,
      "grad_norm": 4.9859700202941895,
      "learning_rate": 9.711821823813812e-06,
      "loss": 1.1047,
      "step": 31
    },
    {
      "epoch": 0.14035087719298245,
      "grad_norm": 4.754051685333252,
      "learning_rate": 9.68756510014794e-06,
      "loss": 1.0646,
      "step": 32
    },
    {
      "epoch": 0.14473684210526316,
      "grad_norm": 4.727588653564453,
      "learning_rate": 9.66236114702178e-06,
      "loss": 1.0211,
      "step": 33
    },
    {
      "epoch": 0.14912280701754385,
      "grad_norm": 4.892414569854736,
      "learning_rate": 9.636215057469009e-06,
      "loss": 1.03,
      "step": 34
    },
    {
      "epoch": 0.15350877192982457,
      "grad_norm": 4.797459602355957,
      "learning_rate": 9.609132114903458e-06,
      "loss": 1.0495,
      "step": 35
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 4.844034194946289,
      "learning_rate": 9.581117792051487e-06,
      "loss": 1.0653,
      "step": 36
    },
    {
      "epoch": 0.16228070175438597,
      "grad_norm": 4.3364057540893555,
      "learning_rate": 9.552177749846083e-06,
      "loss": 1.0085,
      "step": 37
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 4.658485412597656,
      "learning_rate": 9.522317836282949e-06,
      "loss": 1.0548,
      "step": 38
    },
    {
      "epoch": 0.17105263157894737,
      "grad_norm": 4.726075649261475,
      "learning_rate": 9.491544085238778e-06,
      "loss": 1.0704,
      "step": 39
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 4.422685623168945,
      "learning_rate": 9.459862715251973e-06,
      "loss": 1.0463,
      "step": 40
    },
    {
      "epoch": 0.17982456140350878,
      "grad_norm": 4.876737117767334,
      "learning_rate": 9.427280128266049e-06,
      "loss": 1.0771,
      "step": 41
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 4.657575607299805,
      "learning_rate": 9.393802908335978e-06,
      "loss": 1.0615,
      "step": 42
    },
    {
      "epoch": 0.18859649122807018,
      "grad_norm": 4.246818542480469,
      "learning_rate": 9.359437820297716e-06,
      "loss": 1.0431,
      "step": 43
    },
    {
      "epoch": 0.19298245614035087,
      "grad_norm": 4.417186737060547,
      "learning_rate": 9.324191808401235e-06,
      "loss": 1.0271,
      "step": 44
    },
    {
      "epoch": 0.19736842105263158,
      "grad_norm": 4.741846084594727,
      "learning_rate": 9.288071994907262e-06,
      "loss": 1.0765,
      "step": 45
    },
    {
      "epoch": 0.20175438596491227,
      "grad_norm": 5.002007007598877,
      "learning_rate": 9.251085678648072e-06,
      "loss": 1.0526,
      "step": 46
    },
    {
      "epoch": 0.20614035087719298,
      "grad_norm": 4.708237171173096,
      "learning_rate": 9.213240333552589e-06,
      "loss": 1.0502,
      "step": 47
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 4.335841655731201,
      "learning_rate": 9.174543607136111e-06,
      "loss": 0.9878,
      "step": 48
    },
    {
      "epoch": 0.2149122807017544,
      "grad_norm": 4.573909282684326,
      "learning_rate": 9.135003318954954e-06,
      "loss": 0.9989,
      "step": 49
    },
    {
      "epoch": 0.21929824561403508,
      "grad_norm": 4.54152774810791,
      "learning_rate": 9.094627459026326e-06,
      "loss": 1.0816,
      "step": 50
    },
    {
      "epoch": 0.2236842105263158,
      "grad_norm": 4.57687520980835,
      "learning_rate": 9.053424186213776e-06,
      "loss": 1.0509,
      "step": 51
    },
    {
      "epoch": 0.22807017543859648,
      "grad_norm": 4.246875762939453,
      "learning_rate": 9.011401826578492e-06,
      "loss": 0.9849,
      "step": 52
    },
    {
      "epoch": 0.2324561403508772,
      "grad_norm": 4.673925876617432,
      "learning_rate": 8.968568871696847e-06,
      "loss": 0.9975,
      "step": 53
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 4.580467224121094,
      "learning_rate": 8.924933976944474e-06,
      "loss": 1.0747,
      "step": 54
    },
    {
      "epoch": 0.2412280701754386,
      "grad_norm": 4.450971603393555,
      "learning_rate": 8.880505959747245e-06,
      "loss": 1.028,
      "step": 55
    },
    {
      "epoch": 0.24561403508771928,
      "grad_norm": 4.807090759277344,
      "learning_rate": 8.835293797799517e-06,
      "loss": 0.9659,
      "step": 56
    },
    {
      "epoch": 0.25,
      "grad_norm": 4.357285022735596,
      "learning_rate": 8.789306627249985e-06,
      "loss": 1.0228,
      "step": 57
    },
    {
      "epoch": 0.2543859649122807,
      "grad_norm": 4.60875940322876,
      "learning_rate": 8.742553740855507e-06,
      "loss": 1.0338,
      "step": 58
    },
    {
      "epoch": 0.25877192982456143,
      "grad_norm": 4.6360321044921875,
      "learning_rate": 8.695044586103297e-06,
      "loss": 1.0344,
      "step": 59
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 4.603052139282227,
      "learning_rate": 8.646788763301842e-06,
      "loss": 1.0423,
      "step": 60
    },
    {
      "epoch": 0.2675438596491228,
      "grad_norm": 4.477334976196289,
      "learning_rate": 8.59779602364094e-06,
      "loss": 1.0372,
      "step": 61
    },
    {
      "epoch": 0.2719298245614035,
      "grad_norm": 4.819328784942627,
      "learning_rate": 8.548076267221258e-06,
      "loss": 1.0935,
      "step": 62
    },
    {
      "epoch": 0.27631578947368424,
      "grad_norm": 4.351193428039551,
      "learning_rate": 8.497639541053769e-06,
      "loss": 0.9915,
      "step": 63
    },
    {
      "epoch": 0.2807017543859649,
      "grad_norm": 9.282684326171875,
      "learning_rate": 8.446496037029555e-06,
      "loss": 1.0252,
      "step": 64
    },
    {
      "epoch": 0.2850877192982456,
      "grad_norm": 4.520321369171143,
      "learning_rate": 8.394656089860274e-06,
      "loss": 0.9989,
      "step": 65
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 4.605741500854492,
      "learning_rate": 8.342130174989819e-06,
      "loss": 1.0872,
      "step": 66
    },
    {
      "epoch": 0.29385964912280704,
      "grad_norm": 4.512912750244141,
      "learning_rate": 8.288928906477497e-06,
      "loss": 1.0545,
      "step": 67
    },
    {
      "epoch": 0.2982456140350877,
      "grad_norm": 4.024132251739502,
      "learning_rate": 8.235063034853228e-06,
      "loss": 1.0128,
      "step": 68
    },
    {
      "epoch": 0.3026315789473684,
      "grad_norm": 4.560809135437012,
      "learning_rate": 8.180543444945154e-06,
      "loss": 1.0294,
      "step": 69
    },
    {
      "epoch": 0.30701754385964913,
      "grad_norm": 4.295252799987793,
      "learning_rate": 8.125381153680103e-06,
      "loss": 1.0445,
      "step": 70
    },
    {
      "epoch": 0.31140350877192985,
      "grad_norm": 4.614228248596191,
      "learning_rate": 8.069587307857377e-06,
      "loss": 1.0168,
      "step": 71
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 4.256739139556885,
      "learning_rate": 8.013173181896283e-06,
      "loss": 1.0207,
      "step": 72
    },
    {
      "epoch": 0.3201754385964912,
      "grad_norm": 4.458586692810059,
      "learning_rate": 7.95615017555788e-06,
      "loss": 1.0401,
      "step": 73
    },
    {
      "epoch": 0.32456140350877194,
      "grad_norm": 4.6360392570495605,
      "learning_rate": 7.898529811641393e-06,
      "loss": 1.0347,
      "step": 74
    },
    {
      "epoch": 0.32894736842105265,
      "grad_norm": 4.286371231079102,
      "learning_rate": 7.84032373365578e-06,
      "loss": 1.061,
      "step": 75
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 4.306332588195801,
      "learning_rate": 7.781543703466881e-06,
      "loss": 0.988,
      "step": 76
    },
    {
      "epoch": 0.33771929824561403,
      "grad_norm": 4.2425947189331055,
      "learning_rate": 7.722201598920673e-06,
      "loss": 1.0442,
      "step": 77
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 4.27526330947876,
      "learning_rate": 7.662309411443084e-06,
      "loss": 1.0412,
      "step": 78
    },
    {
      "epoch": 0.34649122807017546,
      "grad_norm": 4.224104881286621,
      "learning_rate": 7.601879243616838e-06,
      "loss": 1.0205,
      "step": 79
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 4.250307559967041,
      "learning_rate": 7.540923306735868e-06,
      "loss": 1.0298,
      "step": 80
    },
    {
      "epoch": 0.35526315789473684,
      "grad_norm": 4.223860263824463,
      "learning_rate": 7.479453918337733e-06,
      "loss": 1.0174,
      "step": 81
    },
    {
      "epoch": 0.35964912280701755,
      "grad_norm": 4.442715644836426,
      "learning_rate": 7.417483499714589e-06,
      "loss": 1.0281,
      "step": 82
    },
    {
      "epoch": 0.36403508771929827,
      "grad_norm": 4.697696685791016,
      "learning_rate": 7.355024573403174e-06,
      "loss": 1.0045,
      "step": 83
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 4.343409061431885,
      "learning_rate": 7.292089760654352e-06,
      "loss": 1.0253,
      "step": 84
    },
    {
      "epoch": 0.37280701754385964,
      "grad_norm": 4.392301082611084,
      "learning_rate": 7.2286917788826926e-06,
      "loss": 1.0245,
      "step": 85
    },
    {
      "epoch": 0.37719298245614036,
      "grad_norm": 5.410698890686035,
      "learning_rate": 7.1648434390966356e-06,
      "loss": 1.0149,
      "step": 86
    },
    {
      "epoch": 0.3815789473684211,
      "grad_norm": 4.763819694519043,
      "learning_rate": 7.100557643309732e-06,
      "loss": 1.0019,
      "step": 87
    },
    {
      "epoch": 0.38596491228070173,
      "grad_norm": 4.71998405456543,
      "learning_rate": 7.035847381933494e-06,
      "loss": 1.0147,
      "step": 88
    },
    {
      "epoch": 0.39035087719298245,
      "grad_norm": 4.432498931884766,
      "learning_rate": 6.970725731152389e-06,
      "loss": 0.9572,
      "step": 89
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 4.130496978759766,
      "learning_rate": 6.905205850281502e-06,
      "loss": 0.9651,
      "step": 90
    },
    {
      "epoch": 0.3991228070175439,
      "grad_norm": 4.6582441329956055,
      "learning_rate": 6.8393009791073895e-06,
      "loss": 1.0471,
      "step": 91
    },
    {
      "epoch": 0.40350877192982454,
      "grad_norm": 4.263586521148682,
      "learning_rate": 6.773024435212678e-06,
      "loss": 1.0062,
      "step": 92
    },
    {
      "epoch": 0.40789473684210525,
      "grad_norm": 4.377040386199951,
      "learning_rate": 6.706389611284953e-06,
      "loss": 1.0101,
      "step": 93
    },
    {
      "epoch": 0.41228070175438597,
      "grad_norm": 4.429991722106934,
      "learning_rate": 6.639409972410446e-06,
      "loss": 1.0208,
      "step": 94
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 4.125415325164795,
      "learning_rate": 6.57209905335312e-06,
      "loss": 1.0014,
      "step": 95
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 4.075379848480225,
      "learning_rate": 6.504470455819651e-06,
      "loss": 0.9726,
      "step": 96
    },
    {
      "epoch": 0.42543859649122806,
      "grad_norm": 4.166284084320068,
      "learning_rate": 6.436537845710904e-06,
      "loss": 1.0462,
      "step": 97
    },
    {
      "epoch": 0.4298245614035088,
      "grad_norm": 3.857902765274048,
      "learning_rate": 6.368314950360416e-06,
      "loss": 0.997,
      "step": 98
    },
    {
      "epoch": 0.4342105263157895,
      "grad_norm": 4.08658504486084,
      "learning_rate": 6.299815555760478e-06,
      "loss": 1.0351,
      "step": 99
    },
    {
      "epoch": 0.43859649122807015,
      "grad_norm": 4.2274651527404785,
      "learning_rate": 6.231053503776363e-06,
      "loss": 1.04,
      "step": 100
    },
    {
      "epoch": 0.44298245614035087,
      "grad_norm": 4.353753089904785,
      "learning_rate": 6.1620426893492645e-06,
      "loss": 0.9802,
      "step": 101
    },
    {
      "epoch": 0.4473684210526316,
      "grad_norm": 4.267554759979248,
      "learning_rate": 6.092797057688496e-06,
      "loss": 1.006,
      "step": 102
    },
    {
      "epoch": 0.4517543859649123,
      "grad_norm": 4.590907573699951,
      "learning_rate": 6.0233306014535505e-06,
      "loss": 0.9833,
      "step": 103
    },
    {
      "epoch": 0.45614035087719296,
      "grad_norm": 4.248331546783447,
      "learning_rate": 5.953657357926569e-06,
      "loss": 1.0409,
      "step": 104
    },
    {
      "epoch": 0.4605263157894737,
      "grad_norm": 3.9719252586364746,
      "learning_rate": 5.883791406175775e-06,
      "loss": 1.0191,
      "step": 105
    },
    {
      "epoch": 0.4649122807017544,
      "grad_norm": 4.340267181396484,
      "learning_rate": 5.813746864210489e-06,
      "loss": 0.9992,
      "step": 106
    },
    {
      "epoch": 0.4692982456140351,
      "grad_norm": 4.42672872543335,
      "learning_rate": 5.743537886128258e-06,
      "loss": 1.0023,
      "step": 107
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 4.119699478149414,
      "learning_rate": 5.673178659254698e-06,
      "loss": 0.9909,
      "step": 108
    },
    {
      "epoch": 0.4780701754385965,
      "grad_norm": 4.342530250549316,
      "learning_rate": 5.6026834012766155e-06,
      "loss": 0.9598,
      "step": 109
    },
    {
      "epoch": 0.4824561403508772,
      "grad_norm": 4.101733684539795,
      "learning_rate": 5.532066357369012e-06,
      "loss": 0.9879,
      "step": 110
    },
    {
      "epoch": 0.4868421052631579,
      "grad_norm": 4.340119361877441,
      "learning_rate": 5.46134179731651e-06,
      "loss": 1.0184,
      "step": 111
    },
    {
      "epoch": 0.49122807017543857,
      "grad_norm": 4.37206506729126,
      "learning_rate": 5.390524012629824e-06,
      "loss": 1.0496,
      "step": 112
    },
    {
      "epoch": 0.4956140350877193,
      "grad_norm": 4.118902683258057,
      "learning_rate": 5.319627313657829e-06,
      "loss": 1.0399,
      "step": 113
    },
    {
      "epoch": 0.5,
      "grad_norm": 4.071263790130615,
      "learning_rate": 5.248666026695835e-06,
      "loss": 0.9906,
      "step": 114
    },
    {
      "epoch": 0.5043859649122807,
      "grad_norm": 4.421329975128174,
      "learning_rate": 5.177654491090627e-06,
      "loss": 0.9836,
      "step": 115
    },
    {
      "epoch": 0.5087719298245614,
      "grad_norm": 4.517954349517822,
      "learning_rate": 5.1066070563428736e-06,
      "loss": 1.0083,
      "step": 116
    },
    {
      "epoch": 0.5131578947368421,
      "grad_norm": 4.462676525115967,
      "learning_rate": 5.035538079207488e-06,
      "loss": 1.0034,
      "step": 117
    },
    {
      "epoch": 0.5175438596491229,
      "grad_norm": 4.097958087921143,
      "learning_rate": 4.964461920792512e-06,
      "loss": 1.0244,
      "step": 118
    },
    {
      "epoch": 0.5219298245614035,
      "grad_norm": 4.527233600616455,
      "learning_rate": 4.893392943657127e-06,
      "loss": 0.9473,
      "step": 119
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 4.666014671325684,
      "learning_rate": 4.822345508909376e-06,
      "loss": 0.93,
      "step": 120
    },
    {
      "epoch": 0.5307017543859649,
      "grad_norm": 4.115242958068848,
      "learning_rate": 4.751333973304166e-06,
      "loss": 1.0262,
      "step": 121
    },
    {
      "epoch": 0.5350877192982456,
      "grad_norm": 4.032143592834473,
      "learning_rate": 4.680372686342173e-06,
      "loss": 1.0236,
      "step": 122
    },
    {
      "epoch": 0.5394736842105263,
      "grad_norm": 4.429379463195801,
      "learning_rate": 4.609475987370177e-06,
      "loss": 0.9891,
      "step": 123
    },
    {
      "epoch": 0.543859649122807,
      "grad_norm": 4.283740043640137,
      "learning_rate": 4.53865820268349e-06,
      "loss": 0.9761,
      "step": 124
    },
    {
      "epoch": 0.5482456140350878,
      "grad_norm": 4.0465545654296875,
      "learning_rate": 4.467933642630989e-06,
      "loss": 0.9847,
      "step": 125
    },
    {
      "epoch": 0.5526315789473685,
      "grad_norm": 4.005406856536865,
      "learning_rate": 4.397316598723385e-06,
      "loss": 0.9916,
      "step": 126
    },
    {
      "epoch": 0.5570175438596491,
      "grad_norm": 4.05433988571167,
      "learning_rate": 4.326821340745304e-06,
      "loss": 0.9535,
      "step": 127
    },
    {
      "epoch": 0.5614035087719298,
      "grad_norm": 4.201546669006348,
      "learning_rate": 4.256462113871741e-06,
      "loss": 0.9249,
      "step": 128
    },
    {
      "epoch": 0.5657894736842105,
      "grad_norm": 4.1509904861450195,
      "learning_rate": 4.186253135789511e-06,
      "loss": 0.9885,
      "step": 129
    },
    {
      "epoch": 0.5701754385964912,
      "grad_norm": 4.355051040649414,
      "learning_rate": 4.116208593824227e-06,
      "loss": 1.0087,
      "step": 130
    },
    {
      "epoch": 0.5745614035087719,
      "grad_norm": 4.163605690002441,
      "learning_rate": 4.046342642073433e-06,
      "loss": 0.9654,
      "step": 131
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 4.231169700622559,
      "learning_rate": 3.976669398546451e-06,
      "loss": 0.991,
      "step": 132
    },
    {
      "epoch": 0.5833333333333334,
      "grad_norm": 4.092093467712402,
      "learning_rate": 3.907202942311506e-06,
      "loss": 0.9702,
      "step": 133
    },
    {
      "epoch": 0.5877192982456141,
      "grad_norm": 4.0646233558654785,
      "learning_rate": 3.837957310650738e-06,
      "loss": 0.9451,
      "step": 134
    },
    {
      "epoch": 0.5921052631578947,
      "grad_norm": 4.397289276123047,
      "learning_rate": 3.7689464962236367e-06,
      "loss": 0.9498,
      "step": 135
    },
    {
      "epoch": 0.5964912280701754,
      "grad_norm": 3.946786403656006,
      "learning_rate": 3.700184444239524e-06,
      "loss": 0.9828,
      "step": 136
    },
    {
      "epoch": 0.6008771929824561,
      "grad_norm": 4.302344799041748,
      "learning_rate": 3.6316850496395863e-06,
      "loss": 0.956,
      "step": 137
    },
    {
      "epoch": 0.6052631578947368,
      "grad_norm": 4.430941581726074,
      "learning_rate": 3.563462154289098e-06,
      "loss": 0.9553,
      "step": 138
    },
    {
      "epoch": 0.6096491228070176,
      "grad_norm": 4.306185722351074,
      "learning_rate": 3.49552954418035e-06,
      "loss": 1.0269,
      "step": 139
    },
    {
      "epoch": 0.6140350877192983,
      "grad_norm": 4.095279216766357,
      "learning_rate": 3.4279009466468825e-06,
      "loss": 0.9888,
      "step": 140
    },
    {
      "epoch": 0.618421052631579,
      "grad_norm": 4.261977195739746,
      "learning_rate": 3.3605900275895565e-06,
      "loss": 1.0003,
      "step": 141
    },
    {
      "epoch": 0.6228070175438597,
      "grad_norm": 3.9479899406433105,
      "learning_rate": 3.2936103887150484e-06,
      "loss": 0.9982,
      "step": 142
    },
    {
      "epoch": 0.6271929824561403,
      "grad_norm": 4.058523654937744,
      "learning_rate": 3.226975564787322e-06,
      "loss": 0.9645,
      "step": 143
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 4.375962734222412,
      "learning_rate": 3.1606990208926125e-06,
      "loss": 0.9443,
      "step": 144
    },
    {
      "epoch": 0.6359649122807017,
      "grad_norm": 4.618311405181885,
      "learning_rate": 3.0947941497184985e-06,
      "loss": 1.0069,
      "step": 145
    },
    {
      "epoch": 0.6403508771929824,
      "grad_norm": 4.422805309295654,
      "learning_rate": 3.0292742688476125e-06,
      "loss": 0.9718,
      "step": 146
    },
    {
      "epoch": 0.6447368421052632,
      "grad_norm": 3.93886399269104,
      "learning_rate": 2.964152618066508e-06,
      "loss": 0.9981,
      "step": 147
    },
    {
      "epoch": 0.6491228070175439,
      "grad_norm": 3.937767744064331,
      "learning_rate": 2.899442356690271e-06,
      "loss": 0.994,
      "step": 148
    },
    {
      "epoch": 0.6535087719298246,
      "grad_norm": 4.434424877166748,
      "learning_rate": 2.835156560903365e-06,
      "loss": 0.9605,
      "step": 149
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 4.723587989807129,
      "learning_rate": 2.771308221117309e-06,
      "loss": 0.9965,
      "step": 150
    },
    {
      "epoch": 0.6622807017543859,
      "grad_norm": 4.1226301193237305,
      "learning_rate": 2.7079102393456503e-06,
      "loss": 1.0161,
      "step": 151
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 3.8456857204437256,
      "learning_rate": 2.6449754265968263e-06,
      "loss": 0.9696,
      "step": 152
    },
    {
      "epoch": 0.6710526315789473,
      "grad_norm": 3.9130499362945557,
      "learning_rate": 2.5825165002854124e-06,
      "loss": 0.9674,
      "step": 153
    },
    {
      "epoch": 0.6754385964912281,
      "grad_norm": 4.2803473472595215,
      "learning_rate": 2.5205460816622684e-06,
      "loss": 0.9933,
      "step": 154
    },
    {
      "epoch": 0.6798245614035088,
      "grad_norm": 3.8558075428009033,
      "learning_rate": 2.4590766932641353e-06,
      "loss": 0.977,
      "step": 155
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 4.1506452560424805,
      "learning_rate": 2.3981207563831633e-06,
      "loss": 0.9729,
      "step": 156
    },
    {
      "epoch": 0.6885964912280702,
      "grad_norm": 4.3258843421936035,
      "learning_rate": 2.3376905885569185e-06,
      "loss": 0.914,
      "step": 157
    },
    {
      "epoch": 0.6929824561403509,
      "grad_norm": 4.091325759887695,
      "learning_rate": 2.2777984010793264e-06,
      "loss": 0.9641,
      "step": 158
    },
    {
      "epoch": 0.6973684210526315,
      "grad_norm": 4.333226203918457,
      "learning_rate": 2.2184562965331203e-06,
      "loss": 0.9074,
      "step": 159
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 3.8289241790771484,
      "learning_rate": 2.159676266344222e-06,
      "loss": 0.938,
      "step": 160
    },
    {
      "epoch": 0.706140350877193,
      "grad_norm": 4.058959007263184,
      "learning_rate": 2.1014701883586087e-06,
      "loss": 0.9387,
      "step": 161
    },
    {
      "epoch": 0.7105263157894737,
      "grad_norm": 3.7621591091156006,
      "learning_rate": 2.043849824442124e-06,
      "loss": 0.9931,
      "step": 162
    },
    {
      "epoch": 0.7149122807017544,
      "grad_norm": 4.214111804962158,
      "learning_rate": 1.9868268181037186e-06,
      "loss": 0.9578,
      "step": 163
    },
    {
      "epoch": 0.7192982456140351,
      "grad_norm": 4.251298427581787,
      "learning_rate": 1.9304126921426235e-06,
      "loss": 0.9615,
      "step": 164
    },
    {
      "epoch": 0.7236842105263158,
      "grad_norm": 3.980278730392456,
      "learning_rate": 1.8746188463198983e-06,
      "loss": 0.952,
      "step": 165
    },
    {
      "epoch": 0.7280701754385965,
      "grad_norm": 4.493079662322998,
      "learning_rate": 1.8194565550548477e-06,
      "loss": 0.9422,
      "step": 166
    },
    {
      "epoch": 0.7324561403508771,
      "grad_norm": 4.167688846588135,
      "learning_rate": 1.764936965146773e-06,
      "loss": 0.9556,
      "step": 167
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 3.890090227127075,
      "learning_rate": 1.7110710935225055e-06,
      "loss": 0.9609,
      "step": 168
    },
    {
      "epoch": 0.7412280701754386,
      "grad_norm": 4.315472602844238,
      "learning_rate": 1.6578698250101828e-06,
      "loss": 0.9945,
      "step": 169
    },
    {
      "epoch": 0.7456140350877193,
      "grad_norm": 4.024162769317627,
      "learning_rate": 1.6053439101397257e-06,
      "loss": 0.9526,
      "step": 170
    },
    {
      "epoch": 0.75,
      "grad_norm": 4.042996883392334,
      "learning_rate": 1.5535039629704467e-06,
      "loss": 0.953,
      "step": 171
    },
    {
      "epoch": 0.7543859649122807,
      "grad_norm": 4.670426845550537,
      "learning_rate": 1.502360458946232e-06,
      "loss": 0.9418,
      "step": 172
    },
    {
      "epoch": 0.7587719298245614,
      "grad_norm": 3.7296359539031982,
      "learning_rate": 1.451923732778745e-06,
      "loss": 0.9842,
      "step": 173
    },
    {
      "epoch": 0.7631578947368421,
      "grad_norm": 4.069624423980713,
      "learning_rate": 1.4022039763590595e-06,
      "loss": 0.9483,
      "step": 174
    },
    {
      "epoch": 0.7675438596491229,
      "grad_norm": 3.8240842819213867,
      "learning_rate": 1.3532112366981598e-06,
      "loss": 1.0225,
      "step": 175
    },
    {
      "epoch": 0.7719298245614035,
      "grad_norm": 4.527555465698242,
      "learning_rate": 1.3049554138967052e-06,
      "loss": 0.9787,
      "step": 176
    },
    {
      "epoch": 0.7763157894736842,
      "grad_norm": 3.748631238937378,
      "learning_rate": 1.257446259144494e-06,
      "loss": 0.9812,
      "step": 177
    },
    {
      "epoch": 0.7807017543859649,
      "grad_norm": 3.8050689697265625,
      "learning_rate": 1.210693372750017e-06,
      "loss": 0.9686,
      "step": 178
    },
    {
      "epoch": 0.7850877192982456,
      "grad_norm": 3.9663491249084473,
      "learning_rate": 1.1647062022004845e-06,
      "loss": 0.9909,
      "step": 179
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 3.8755075931549072,
      "learning_rate": 1.1194940402527566e-06,
      "loss": 0.9682,
      "step": 180
    },
    {
      "epoch": 0.793859649122807,
      "grad_norm": 3.51550030708313,
      "learning_rate": 1.075066023055527e-06,
      "loss": 0.9218,
      "step": 181
    },
    {
      "epoch": 0.7982456140350878,
      "grad_norm": 3.9786829948425293,
      "learning_rate": 1.0314311283031531e-06,
      "loss": 0.9753,
      "step": 182
    },
    {
      "epoch": 0.8026315789473685,
      "grad_norm": 4.108859539031982,
      "learning_rate": 9.885981734215094e-07,
      "loss": 0.9746,
      "step": 183
    },
    {
      "epoch": 0.8070175438596491,
      "grad_norm": 4.208093166351318,
      "learning_rate": 9.465758137862264e-07,
      "loss": 0.9479,
      "step": 184
    },
    {
      "epoch": 0.8114035087719298,
      "grad_norm": 4.333961009979248,
      "learning_rate": 9.053725409736752e-07,
      "loss": 0.9734,
      "step": 185
    },
    {
      "epoch": 0.8157894736842105,
      "grad_norm": 3.7324233055114746,
      "learning_rate": 8.649966810450472e-07,
      "loss": 0.9145,
      "step": 186
    },
    {
      "epoch": 0.8201754385964912,
      "grad_norm": 3.5640878677368164,
      "learning_rate": 8.254563928638892e-07,
      "loss": 0.9302,
      "step": 187
    },
    {
      "epoch": 0.8245614035087719,
      "grad_norm": 4.192229270935059,
      "learning_rate": 7.86759666447412e-07,
      "loss": 0.9509,
      "step": 188
    },
    {
      "epoch": 0.8289473684210527,
      "grad_norm": 4.028424263000488,
      "learning_rate": 7.489143213519301e-07,
      "loss": 0.9081,
      "step": 189
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 3.9033594131469727,
      "learning_rate": 7.119280050927407e-07,
      "loss": 0.9404,
      "step": 190
    },
    {
      "epoch": 0.8377192982456141,
      "grad_norm": 4.0438361167907715,
      "learning_rate": 6.758081915987669e-07,
      "loss": 0.9482,
      "step": 191
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 4.058148384094238,
      "learning_rate": 6.405621797022848e-07,
      "loss": 0.9161,
      "step": 192
    },
    {
      "epoch": 0.8464912280701754,
      "grad_norm": 4.070837020874023,
      "learning_rate": 6.061970916640236e-07,
      "loss": 0.9351,
      "step": 193
    },
    {
      "epoch": 0.8508771929824561,
      "grad_norm": 3.934864044189453,
      "learning_rate": 5.727198717339511e-07,
      "loss": 0.9633,
      "step": 194
    },
    {
      "epoch": 0.8552631578947368,
      "grad_norm": 3.696377992630005,
      "learning_rate": 5.401372847480285e-07,
      "loss": 0.9923,
      "step": 195
    },
    {
      "epoch": 0.8596491228070176,
      "grad_norm": 4.334061622619629,
      "learning_rate": 5.084559147612244e-07,
      "loss": 0.9049,
      "step": 196
    },
    {
      "epoch": 0.8640350877192983,
      "grad_norm": 3.781756639480591,
      "learning_rate": 4.776821637170525e-07,
      "loss": 0.9344,
      "step": 197
    },
    {
      "epoch": 0.868421052631579,
      "grad_norm": 3.749248504638672,
      "learning_rate": 4.4782225015391754e-07,
      "loss": 0.9594,
      "step": 198
    },
    {
      "epoch": 0.8728070175438597,
      "grad_norm": 3.8009796142578125,
      "learning_rate": 4.1888220794851386e-07,
      "loss": 0.9519,
      "step": 199
    },
    {
      "epoch": 0.8771929824561403,
      "grad_norm": 3.914064645767212,
      "learning_rate": 3.908678850965425e-07,
      "loss": 0.9539,
      "step": 200
    },
    {
      "epoch": 0.881578947368421,
      "grad_norm": 3.75138521194458,
      "learning_rate": 3.6378494253099307e-07,
      "loss": 0.9506,
      "step": 201
    },
    {
      "epoch": 0.8859649122807017,
      "grad_norm": 3.7791149616241455,
      "learning_rate": 3.3763885297822153e-07,
      "loss": 0.9309,
      "step": 202
    },
    {
      "epoch": 0.8903508771929824,
      "grad_norm": 3.780644655227661,
      "learning_rate": 3.1243489985206097e-07,
      "loss": 0.9497,
      "step": 203
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 4.0235676765441895,
      "learning_rate": 2.8817817618618846e-07,
      "loss": 0.9707,
      "step": 204
    },
    {
      "epoch": 0.8991228070175439,
      "grad_norm": 3.802351951599121,
      "learning_rate": 2.648735836049615e-07,
      "loss": 0.953,
      "step": 205
    },
    {
      "epoch": 0.9035087719298246,
      "grad_norm": 3.995856523513794,
      "learning_rate": 2.4252583133292927e-07,
      "loss": 0.9465,
      "step": 206
    },
    {
      "epoch": 0.9078947368421053,
      "grad_norm": 4.330905914306641,
      "learning_rate": 2.2113943524323167e-07,
      "loss": 0.9527,
      "step": 207
    },
    {
      "epoch": 0.9122807017543859,
      "grad_norm": 4.164973258972168,
      "learning_rate": 2.007187169450603e-07,
      "loss": 0.9498,
      "step": 208
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 3.576552391052246,
      "learning_rate": 1.8126780291038037e-07,
      "loss": 0.9582,
      "step": 209
    },
    {
      "epoch": 0.9210526315789473,
      "grad_norm": 3.625762939453125,
      "learning_rate": 1.6279062364008446e-07,
      "loss": 0.9169,
      "step": 210
    },
    {
      "epoch": 0.9254385964912281,
      "grad_norm": 3.5316288471221924,
      "learning_rate": 1.4529091286973994e-07,
      "loss": 0.9789,
      "step": 211
    },
    {
      "epoch": 0.9298245614035088,
      "grad_norm": 3.8600916862487793,
      "learning_rate": 1.2877220681510927e-07,
      "loss": 0.9705,
      "step": 212
    },
    {
      "epoch": 0.9342105263157895,
      "grad_norm": 3.9807686805725098,
      "learning_rate": 1.1323784345757205e-07,
      "loss": 0.9249,
      "step": 213
    },
    {
      "epoch": 0.9385964912280702,
      "grad_norm": 3.589660406112671,
      "learning_rate": 9.869096186961025e-08,
      "loss": 0.9967,
      "step": 214
    },
    {
      "epoch": 0.9429824561403509,
      "grad_norm": 3.8679358959198,
      "learning_rate": 8.513450158049109e-08,
      "loss": 0.9782,
      "step": 215
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 4.188008785247803,
      "learning_rate": 7.257120198226219e-08,
      "loss": 0.9853,
      "step": 216
    },
    {
      "epoch": 0.9517543859649122,
      "grad_norm": 4.046226978302002,
      "learning_rate": 6.100360177619946e-08,
      "loss": 0.8929,
      "step": 217
    },
    {
      "epoch": 0.956140350877193,
      "grad_norm": 7.8870110511779785,
      "learning_rate": 5.0434038459801213e-08,
      "loss": 0.9297,
      "step": 218
    },
    {
      "epoch": 0.9605263157894737,
      "grad_norm": 3.9931716918945312,
      "learning_rate": 4.086464785444777e-08,
      "loss": 0.9525,
      "step": 219
    },
    {
      "epoch": 0.9649122807017544,
      "grad_norm": 3.876042604446411,
      "learning_rate": 3.229736367380498e-08,
      "loss": 0.9839,
      "step": 220
    },
    {
      "epoch": 0.9692982456140351,
      "grad_norm": 4.057157516479492,
      "learning_rate": 2.4733917133077378e-08,
      "loss": 0.9349,
      "step": 221
    },
    {
      "epoch": 0.9736842105263158,
      "grad_norm": 3.735743522644043,
      "learning_rate": 1.8175836599173545e-08,
      "loss": 0.9716,
      "step": 222
    },
    {
      "epoch": 0.9780701754385965,
      "grad_norm": 3.7397193908691406,
      "learning_rate": 1.2624447281867625e-08,
      "loss": 0.9661,
      "step": 223
    },
    {
      "epoch": 0.9824561403508771,
      "grad_norm": 3.6132941246032715,
      "learning_rate": 8.080870966008513e-09,
      "loss": 0.9614,
      "step": 224
    },
    {
      "epoch": 0.9868421052631579,
      "grad_norm": 4.1698784828186035,
      "learning_rate": 4.546025784837316e-09,
      "loss": 0.9655,
      "step": 225
    },
    {
      "epoch": 0.9912280701754386,
      "grad_norm": 3.830890655517578,
      "learning_rate": 2.0206260344590724e-09,
      "loss": 0.9867,
      "step": 226
    },
    {
      "epoch": 0.9956140350877193,
      "grad_norm": 3.9525303840637207,
      "learning_rate": 5.051820295032262e-10,
      "loss": 0.9372,
      "step": 227
    },
    {
      "epoch": 1.0,
      "grad_norm": 4.012087345123291,
      "learning_rate": 0.0,
      "loss": 0.9237,
      "step": 228
    },
    {
      "epoch": 1.0,
      "step": 228,
      "total_flos": 1.387246770039292e+18,
      "train_loss": 1.0000714727661066,
      "train_runtime": 2851.6353,
      "train_samples_per_second": 20.461,
      "train_steps_per_second": 0.08
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 228,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.387246770039292e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}