| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.0, |
| "eval_steps": 200, |
| "global_step": 668, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0029940119760479044, |
| "grad_norm": 2.694822704587142, |
| "learning_rate": 9.999944704978835e-06, |
| "loss": 0.174, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.005988023952095809, |
| "grad_norm": 1.540694377428953, |
| "learning_rate": 9.999778821138357e-06, |
| "loss": 0.1031, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.008982035928143712, |
| "grad_norm": 1.6650818674344259, |
| "learning_rate": 9.999502352147583e-06, |
| "loss": 0.1121, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.011976047904191617, |
| "grad_norm": 1.7580445000808085, |
| "learning_rate": 9.999115304121459e-06, |
| "loss": 0.1051, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.014970059880239521, |
| "grad_norm": 1.7432661703660053, |
| "learning_rate": 9.998617685620715e-06, |
| "loss": 0.1052, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.017964071856287425, |
| "grad_norm": 1.5622913387439772, |
| "learning_rate": 9.998009507651683e-06, |
| "loss": 0.1033, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.020958083832335328, |
| "grad_norm": 1.534172674280163, |
| "learning_rate": 9.997290783666048e-06, |
| "loss": 0.1035, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.023952095808383235, |
| "grad_norm": 1.4876532660064046, |
| "learning_rate": 9.996461529560553e-06, |
| "loss": 0.0952, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.02694610778443114, |
| "grad_norm": 1.5065957449770506, |
| "learning_rate": 9.995521763676645e-06, |
| "loss": 0.1145, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.029940119760479042, |
| "grad_norm": 1.3513837861157794, |
| "learning_rate": 9.994471506800078e-06, |
| "loss": 0.0975, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.03293413173652695, |
| "grad_norm": 1.4058269709713151, |
| "learning_rate": 9.993310782160439e-06, |
| "loss": 0.1068, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.03592814371257485, |
| "grad_norm": 1.3803877704214274, |
| "learning_rate": 9.992039615430648e-06, |
| "loss": 0.0921, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.038922155688622756, |
| "grad_norm": 1.3945715898711406, |
| "learning_rate": 9.99065803472638e-06, |
| "loss": 0.1114, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.041916167664670656, |
| "grad_norm": 1.4546816139508025, |
| "learning_rate": 9.989166070605447e-06, |
| "loss": 0.1072, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.04491017964071856, |
| "grad_norm": 1.480351599059349, |
| "learning_rate": 9.98756375606713e-06, |
| "loss": 0.1, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.04790419161676647, |
| "grad_norm": 1.46275915665189, |
| "learning_rate": 9.985851126551428e-06, |
| "loss": 0.113, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.05089820359281437, |
| "grad_norm": 1.0722369900557316, |
| "learning_rate": 9.9840282199383e-06, |
| "loss": 0.0803, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.05389221556886228, |
| "grad_norm": 1.3430666265845805, |
| "learning_rate": 9.982095076546806e-06, |
| "loss": 0.1182, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.05688622754491018, |
| "grad_norm": 1.561360979226197, |
| "learning_rate": 9.980051739134235e-06, |
| "loss": 0.1272, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.059880239520958084, |
| "grad_norm": 1.2716064252529893, |
| "learning_rate": 9.977898252895133e-06, |
| "loss": 0.1182, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.06287425149700598, |
| "grad_norm": 1.2433276467135879, |
| "learning_rate": 9.975634665460333e-06, |
| "loss": 0.1136, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.0658682634730539, |
| "grad_norm": 1.4562587683694241, |
| "learning_rate": 9.973261026895878e-06, |
| "loss": 0.1193, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.0688622754491018, |
| "grad_norm": 1.2046318508865133, |
| "learning_rate": 9.970777389701927e-06, |
| "loss": 0.1127, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.0718562874251497, |
| "grad_norm": 1.1209438570730352, |
| "learning_rate": 9.968183808811586e-06, |
| "loss": 0.0894, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.0748502994011976, |
| "grad_norm": 1.4258740274999953, |
| "learning_rate": 9.965480341589702e-06, |
| "loss": 0.1086, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.07784431137724551, |
| "grad_norm": 1.348457039378542, |
| "learning_rate": 9.962667047831585e-06, |
| "loss": 0.1076, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.08083832335329341, |
| "grad_norm": 1.3067044062679876, |
| "learning_rate": 9.95974398976169e-06, |
| "loss": 0.1162, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.08383233532934131, |
| "grad_norm": 1.280806605368862, |
| "learning_rate": 9.95671123203224e-06, |
| "loss": 0.1143, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.08682634730538923, |
| "grad_norm": 1.4042111584668944, |
| "learning_rate": 9.953568841721796e-06, |
| "loss": 0.1154, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.08982035928143713, |
| "grad_norm": 1.2535340127099879, |
| "learning_rate": 9.950316888333775e-06, |
| "loss": 0.1125, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.09281437125748503, |
| "grad_norm": 1.0560186599040056, |
| "learning_rate": 9.946955443794908e-06, |
| "loss": 0.0976, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.09580838323353294, |
| "grad_norm": 1.4146974136560293, |
| "learning_rate": 9.943484582453653e-06, |
| "loss": 0.1072, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.09880239520958084, |
| "grad_norm": 1.4941301730987362, |
| "learning_rate": 9.939904381078553e-06, |
| "loss": 0.102, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.10179640718562874, |
| "grad_norm": 1.413313808359942, |
| "learning_rate": 9.93621491885653e-06, |
| "loss": 0.1153, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.10479041916167664, |
| "grad_norm": 1.2850277969498978, |
| "learning_rate": 9.932416277391144e-06, |
| "loss": 0.103, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.10778443113772455, |
| "grad_norm": 1.2903830996306178, |
| "learning_rate": 9.928508540700775e-06, |
| "loss": 0.1235, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.11077844311377245, |
| "grad_norm": 1.5326088245895257, |
| "learning_rate": 9.924491795216777e-06, |
| "loss": 0.1329, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.11377245508982035, |
| "grad_norm": 1.3215649330381873, |
| "learning_rate": 9.920366129781564e-06, |
| "loss": 0.1159, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.11676646706586827, |
| "grad_norm": 1.042094511765466, |
| "learning_rate": 9.916131635646635e-06, |
| "loss": 0.0936, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.11976047904191617, |
| "grad_norm": 1.3770014683229508, |
| "learning_rate": 9.91178840647057e-06, |
| "loss": 0.1272, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.12275449101796407, |
| "grad_norm": 1.1931812346620854, |
| "learning_rate": 9.907336538316946e-06, |
| "loss": 0.1146, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.12574850299401197, |
| "grad_norm": 0.9277316058880373, |
| "learning_rate": 9.902776129652223e-06, |
| "loss": 0.0784, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.12874251497005987, |
| "grad_norm": 1.0765649656661282, |
| "learning_rate": 9.898107281343557e-06, |
| "loss": 0.1004, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.1317365269461078, |
| "grad_norm": 0.9926609945457437, |
| "learning_rate": 9.893330096656576e-06, |
| "loss": 0.0881, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.1347305389221557, |
| "grad_norm": 1.222816772091786, |
| "learning_rate": 9.888444681253087e-06, |
| "loss": 0.1075, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.1377245508982036, |
| "grad_norm": 1.2894461166853382, |
| "learning_rate": 9.883451143188753e-06, |
| "loss": 0.113, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.1407185628742515, |
| "grad_norm": 1.121235366858226, |
| "learning_rate": 9.878349592910694e-06, |
| "loss": 0.0981, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.1437125748502994, |
| "grad_norm": 1.273548714881534, |
| "learning_rate": 9.873140143255035e-06, |
| "loss": 0.1205, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.1467065868263473, |
| "grad_norm": 1.2866045456682342, |
| "learning_rate": 9.867822909444435e-06, |
| "loss": 0.1119, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.1497005988023952, |
| "grad_norm": 1.2783141532135376, |
| "learning_rate": 9.862398009085511e-06, |
| "loss": 0.1023, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.15269461077844312, |
| "grad_norm": 1.3325991624769549, |
| "learning_rate": 9.856865562166256e-06, |
| "loss": 0.1155, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.15568862275449102, |
| "grad_norm": 1.0915358634542538, |
| "learning_rate": 9.851225691053382e-06, |
| "loss": 0.0946, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.15868263473053892, |
| "grad_norm": 1.3812817768812469, |
| "learning_rate": 9.8454785204896e-06, |
| "loss": 0.1246, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.16167664670658682, |
| "grad_norm": 1.1711427977372355, |
| "learning_rate": 9.83962417759088e-06, |
| "loss": 0.1038, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.16467065868263472, |
| "grad_norm": 1.3270361072749957, |
| "learning_rate": 9.833662791843628e-06, |
| "loss": 0.1166, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.16766467065868262, |
| "grad_norm": 1.1961374353611072, |
| "learning_rate": 9.827594495101824e-06, |
| "loss": 0.1082, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.17065868263473055, |
| "grad_norm": 1.2508456378463604, |
| "learning_rate": 9.821419421584108e-06, |
| "loss": 0.109, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.17365269461077845, |
| "grad_norm": 1.5853611337924214, |
| "learning_rate": 9.815137707870806e-06, |
| "loss": 0.1481, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.17664670658682635, |
| "grad_norm": 1.3533294342793396, |
| "learning_rate": 9.808749492900917e-06, |
| "loss": 0.1257, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.17964071856287425, |
| "grad_norm": 1.3361681014706468, |
| "learning_rate": 9.802254917969033e-06, |
| "loss": 0.1062, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.18263473053892215, |
| "grad_norm": 1.5134202209845473, |
| "learning_rate": 9.795654126722218e-06, |
| "loss": 0.1307, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.18562874251497005, |
| "grad_norm": 1.1706733413997306, |
| "learning_rate": 9.788947265156828e-06, |
| "loss": 0.1035, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.18862275449101795, |
| "grad_norm": 1.0889556046235287, |
| "learning_rate": 9.782134481615282e-06, |
| "loss": 0.0987, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.19161676646706588, |
| "grad_norm": 1.2398715014649333, |
| "learning_rate": 9.775215926782788e-06, |
| "loss": 0.1188, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.19461077844311378, |
| "grad_norm": 1.3618169558692828, |
| "learning_rate": 9.768191753683997e-06, |
| "loss": 0.1125, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.19760479041916168, |
| "grad_norm": 1.1704468214057708, |
| "learning_rate": 9.761062117679632e-06, |
| "loss": 0.1136, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.20059880239520958, |
| "grad_norm": 1.1182850488025071, |
| "learning_rate": 9.75382717646304e-06, |
| "loss": 0.1044, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.20359281437125748, |
| "grad_norm": 1.0346184105202574, |
| "learning_rate": 9.746487090056712e-06, |
| "loss": 0.0991, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.20658682634730538, |
| "grad_norm": 1.062900689169356, |
| "learning_rate": 9.739042020808746e-06, |
| "loss": 0.1024, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.20958083832335328, |
| "grad_norm": 1.0911542026965957, |
| "learning_rate": 9.73149213338924e-06, |
| "loss": 0.1136, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.2125748502994012, |
| "grad_norm": 1.2496717021456991, |
| "learning_rate": 9.72383759478667e-06, |
| "loss": 0.1207, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.2155688622754491, |
| "grad_norm": 1.080099379875001, |
| "learning_rate": 9.71607857430419e-06, |
| "loss": 0.0899, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.218562874251497, |
| "grad_norm": 1.4072414605443209, |
| "learning_rate": 9.708215243555875e-06, |
| "loss": 0.1291, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.2215568862275449, |
| "grad_norm": 1.1700638225577527, |
| "learning_rate": 9.700247776462944e-06, |
| "loss": 0.103, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.2245508982035928, |
| "grad_norm": 1.2565934320808063, |
| "learning_rate": 9.6921763492499e-06, |
| "loss": 0.1099, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.2275449101796407, |
| "grad_norm": 0.9546525733169299, |
| "learning_rate": 9.68400114044064e-06, |
| "loss": 0.0804, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.23053892215568864, |
| "grad_norm": 1.08478464224836, |
| "learning_rate": 9.6757223308545e-06, |
| "loss": 0.0939, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.23353293413173654, |
| "grad_norm": 1.3415590730099887, |
| "learning_rate": 9.667340103602263e-06, |
| "loss": 0.1208, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.23652694610778444, |
| "grad_norm": 1.1986328154053258, |
| "learning_rate": 9.658854644082099e-06, |
| "loss": 0.1046, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.23952095808383234, |
| "grad_norm": 1.7151828135588658, |
| "learning_rate": 9.650266139975474e-06, |
| "loss": 0.1399, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.24251497005988024, |
| "grad_norm": 1.1444934755253402, |
| "learning_rate": 9.641574781242999e-06, |
| "loss": 0.106, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.24550898203592814, |
| "grad_norm": 1.0474417551763626, |
| "learning_rate": 9.632780760120217e-06, |
| "loss": 0.0957, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.24850299401197604, |
| "grad_norm": 1.0019186144881402, |
| "learning_rate": 9.62388427111336e-06, |
| "loss": 0.0952, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.25149700598802394, |
| "grad_norm": 1.045782599298007, |
| "learning_rate": 9.614885510995047e-06, |
| "loss": 0.098, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.25449101796407186, |
| "grad_norm": 1.0596517738810545, |
| "learning_rate": 9.605784678799934e-06, |
| "loss": 0.0939, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.25748502994011974, |
| "grad_norm": 1.3158738779872337, |
| "learning_rate": 9.596581975820304e-06, |
| "loss": 0.107, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.26047904191616766, |
| "grad_norm": 1.1584081147238108, |
| "learning_rate": 9.587277605601617e-06, |
| "loss": 0.1091, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.2634730538922156, |
| "grad_norm": 1.3742865320327458, |
| "learning_rate": 9.577871773938013e-06, |
| "loss": 0.1253, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.26646706586826346, |
| "grad_norm": 1.3133549998241743, |
| "learning_rate": 9.568364688867757e-06, |
| "loss": 0.1304, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.2694610778443114, |
| "grad_norm": 0.9687727348118643, |
| "learning_rate": 9.558756560668637e-06, |
| "loss": 0.0938, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.27245508982035926, |
| "grad_norm": 1.1880519099821893, |
| "learning_rate": 9.549047601853313e-06, |
| "loss": 0.1184, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.2754491017964072, |
| "grad_norm": 1.1641235380588761, |
| "learning_rate": 9.539238027164618e-06, |
| "loss": 0.1211, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.27844311377245506, |
| "grad_norm": 0.993560219725402, |
| "learning_rate": 9.52932805357081e-06, |
| "loss": 0.0986, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.281437125748503, |
| "grad_norm": 1.0050842703408378, |
| "learning_rate": 9.519317900260769e-06, |
| "loss": 0.1076, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.2844311377245509, |
| "grad_norm": 1.0082192441585922, |
| "learning_rate": 9.509207788639148e-06, |
| "loss": 0.1075, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.2874251497005988, |
| "grad_norm": 1.0364990627964445, |
| "learning_rate": 9.498997942321484e-06, |
| "loss": 0.1163, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.2904191616766467, |
| "grad_norm": 1.0083859812100648, |
| "learning_rate": 9.488688587129243e-06, |
| "loss": 0.1046, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.2934131736526946, |
| "grad_norm": 0.9802364376711591, |
| "learning_rate": 9.47827995108483e-06, |
| "loss": 0.0908, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.2964071856287425, |
| "grad_norm": 1.2296567986616285, |
| "learning_rate": 9.467772264406545e-06, |
| "loss": 0.1142, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.2994011976047904, |
| "grad_norm": 0.9944404277217507, |
| "learning_rate": 9.457165759503492e-06, |
| "loss": 0.0933, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.3023952095808383, |
| "grad_norm": 1.1318248455399238, |
| "learning_rate": 9.446460670970436e-06, |
| "loss": 0.0995, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.30538922155688625, |
| "grad_norm": 1.0856325033453054, |
| "learning_rate": 9.435657235582616e-06, |
| "loss": 0.0926, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.3083832335329341, |
| "grad_norm": 1.2935359839355358, |
| "learning_rate": 9.424755692290507e-06, |
| "loss": 0.1179, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.31137724550898205, |
| "grad_norm": 1.1697314711229891, |
| "learning_rate": 9.413756282214538e-06, |
| "loss": 0.1096, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.3143712574850299, |
| "grad_norm": 1.039674731853863, |
| "learning_rate": 9.402659248639749e-06, |
| "loss": 0.1061, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.31736526946107785, |
| "grad_norm": 1.21813072366842, |
| "learning_rate": 9.391464837010428e-06, |
| "loss": 0.1128, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.3203592814371258, |
| "grad_norm": 1.2347699420503069, |
| "learning_rate": 9.380173294924661e-06, |
| "loss": 0.114, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.32335329341317365, |
| "grad_norm": 1.0952193810495732, |
| "learning_rate": 9.368784872128877e-06, |
| "loss": 0.1197, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.3263473053892216, |
| "grad_norm": 1.1988107428883443, |
| "learning_rate": 9.357299820512305e-06, |
| "loss": 0.1097, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.32934131736526945, |
| "grad_norm": 1.3238374098387835, |
| "learning_rate": 9.345718394101412e-06, |
| "loss": 0.1168, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.3323353293413174, |
| "grad_norm": 1.154675757029323, |
| "learning_rate": 9.334040849054288e-06, |
| "loss": 0.1221, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.33532934131736525, |
| "grad_norm": 1.1765490559607166, |
| "learning_rate": 9.322267443654974e-06, |
| "loss": 0.1179, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.3383233532934132, |
| "grad_norm": 1.2072662482322083, |
| "learning_rate": 9.310398438307747e-06, |
| "loss": 0.1337, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.3413173652694611, |
| "grad_norm": 1.1674159812190554, |
| "learning_rate": 9.29843409553137e-06, |
| "loss": 0.1142, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.344311377245509, |
| "grad_norm": 1.0999888356981997, |
| "learning_rate": 9.286374679953278e-06, |
| "loss": 0.1058, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.3473053892215569, |
| "grad_norm": 0.9399577245375, |
| "learning_rate": 9.274220458303727e-06, |
| "loss": 0.0989, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.3502994011976048, |
| "grad_norm": 1.140609139219963, |
| "learning_rate": 9.261971699409893e-06, |
| "loss": 0.1144, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.3532934131736527, |
| "grad_norm": 1.2038276135727357, |
| "learning_rate": 9.249628674189928e-06, |
| "loss": 0.1099, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.3562874251497006, |
| "grad_norm": 1.1233255402091744, |
| "learning_rate": 9.237191655646972e-06, |
| "loss": 0.1142, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.3592814371257485, |
| "grad_norm": 1.1927948048157466, |
| "learning_rate": 9.224660918863104e-06, |
| "loss": 0.111, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.36227544910179643, |
| "grad_norm": 1.1248820119091534, |
| "learning_rate": 9.212036740993265e-06, |
| "loss": 0.1167, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.3652694610778443, |
| "grad_norm": 1.0535201928601414, |
| "learning_rate": 9.199319401259132e-06, |
| "loss": 0.0988, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.36826347305389223, |
| "grad_norm": 1.0549521131159716, |
| "learning_rate": 9.186509180942928e-06, |
| "loss": 0.0921, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.3712574850299401, |
| "grad_norm": 1.1359154866076464, |
| "learning_rate": 9.173606363381218e-06, |
| "loss": 0.1157, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.37425149700598803, |
| "grad_norm": 1.1451306175535456, |
| "learning_rate": 9.16061123395863e-06, |
| "loss": 0.1043, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.3772455089820359, |
| "grad_norm": 1.1181962731375765, |
| "learning_rate": 9.147524080101543e-06, |
| "loss": 0.0981, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.38023952095808383, |
| "grad_norm": 1.2551946563935505, |
| "learning_rate": 9.134345191271742e-06, |
| "loss": 0.1197, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.38323353293413176, |
| "grad_norm": 1.1583239194903672, |
| "learning_rate": 9.121074858959997e-06, |
| "loss": 0.124, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.38622754491017963, |
| "grad_norm": 1.196120284188902, |
| "learning_rate": 9.107713376679634e-06, |
| "loss": 0.121, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.38922155688622756, |
| "grad_norm": 1.1742439607970117, |
| "learning_rate": 9.094261039960028e-06, |
| "loss": 0.1, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.39221556886227543, |
| "grad_norm": 1.292026851324083, |
| "learning_rate": 9.08071814634008e-06, |
| "loss": 0.1176, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.39520958083832336, |
| "grad_norm": 1.1451954562828406, |
| "learning_rate": 9.067084995361623e-06, |
| "loss": 0.1097, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.39820359281437123, |
| "grad_norm": 1.207998049376194, |
| "learning_rate": 9.053361888562807e-06, |
| "loss": 0.1213, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.40119760479041916, |
| "grad_norm": 1.2132625284976668, |
| "learning_rate": 9.039549129471423e-06, |
| "loss": 0.1186, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.4041916167664671, |
| "grad_norm": 0.9639736738335528, |
| "learning_rate": 9.025647023598196e-06, |
| "loss": 0.0944, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.40718562874251496, |
| "grad_norm": 1.032185303357511, |
| "learning_rate": 9.011655878430018e-06, |
| "loss": 0.1012, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.4101796407185629, |
| "grad_norm": 1.3933864089957346, |
| "learning_rate": 8.99757600342316e-06, |
| "loss": 0.1462, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.41317365269461076, |
| "grad_norm": 1.0900174358692771, |
| "learning_rate": 8.983407709996415e-06, |
| "loss": 0.0983, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.4161676646706587, |
| "grad_norm": 0.9682241109471825, |
| "learning_rate": 8.969151311524215e-06, |
| "loss": 0.0947, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.41916167664670656, |
| "grad_norm": 1.2153257975847114, |
| "learning_rate": 8.954807123329703e-06, |
| "loss": 0.1226, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.4221556886227545, |
| "grad_norm": 1.046922805146823, |
| "learning_rate": 8.940375462677758e-06, |
| "loss": 0.1069, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.4251497005988024, |
| "grad_norm": 1.1072835807231411, |
| "learning_rate": 8.92585664876797e-06, |
| "loss": 0.1233, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.4281437125748503, |
| "grad_norm": 1.0130173393868498, |
| "learning_rate": 8.911251002727588e-06, |
| "loss": 0.0983, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.4311377245508982, |
| "grad_norm": 0.9712494160291322, |
| "learning_rate": 8.896558847604414e-06, |
| "loss": 0.1033, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.4341317365269461, |
| "grad_norm": 1.1440674743071744, |
| "learning_rate": 8.881780508359661e-06, |
| "loss": 0.1181, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.437125748502994, |
| "grad_norm": 1.2084807313190982, |
| "learning_rate": 8.86691631186076e-06, |
| "loss": 0.1205, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.44011976047904194, |
| "grad_norm": 0.9339544529717132, |
| "learning_rate": 8.851966586874138e-06, |
| "loss": 0.092, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.4431137724550898, |
| "grad_norm": 1.195893147677474, |
| "learning_rate": 8.836931664057935e-06, |
| "loss": 0.1252, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.44610778443113774, |
| "grad_norm": 1.1554746188228662, |
| "learning_rate": 8.821811875954705e-06, |
| "loss": 0.109, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.4491017964071856, |
| "grad_norm": 1.0358188700192996, |
| "learning_rate": 8.806607556984045e-06, |
| "loss": 0.1147, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.45209580838323354, |
| "grad_norm": 1.1495619704118192, |
| "learning_rate": 8.791319043435213e-06, |
| "loss": 0.1108, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.4550898203592814, |
| "grad_norm": 1.0767571878157336, |
| "learning_rate": 8.775946673459682e-06, |
| "loss": 0.1042, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.45808383233532934, |
| "grad_norm": 1.033094443704525, |
| "learning_rate": 8.76049078706366e-06, |
| "loss": 0.0989, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.46107784431137727, |
| "grad_norm": 1.0818141713509735, |
| "learning_rate": 8.744951726100572e-06, |
| "loss": 0.109, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.46407185628742514, |
| "grad_norm": 1.4560036834415162, |
| "learning_rate": 8.729329834263503e-06, |
| "loss": 0.0919, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.46706586826347307, |
| "grad_norm": 1.3249320979346864, |
| "learning_rate": 8.713625457077585e-06, |
| "loss": 0.1275, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.47005988023952094, |
| "grad_norm": 1.1823259804921005, |
| "learning_rate": 8.697838941892371e-06, |
| "loss": 0.116, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.47305389221556887, |
| "grad_norm": 1.0577662488118387, |
| "learning_rate": 8.681970637874131e-06, |
| "loss": 0.1123, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.47604790419161674, |
| "grad_norm": 1.223312576774771, |
| "learning_rate": 8.666020895998154e-06, |
| "loss": 0.1189, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.47904191616766467, |
| "grad_norm": 1.093078965639497, |
| "learning_rate": 8.64999006904096e-06, |
| "loss": 0.1024, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.4820359281437126, |
| "grad_norm": 1.3025555910186162, |
| "learning_rate": 8.63387851157252e-06, |
| "loss": 0.1246, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.48502994011976047, |
| "grad_norm": 1.0704130612666423, |
| "learning_rate": 8.617686579948396e-06, |
| "loss": 0.0976, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.4880239520958084, |
| "grad_norm": 1.2594827510216382, |
| "learning_rate": 8.60141463230187e-06, |
| "loss": 0.1138, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.49101796407185627, |
| "grad_norm": 1.229833818844591, |
| "learning_rate": 8.585063028536015e-06, |
| "loss": 0.1204, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.4940119760479042, |
| "grad_norm": 1.2596948575451763, |
| "learning_rate": 8.568632130315747e-06, |
| "loss": 0.1263, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.49700598802395207, |
| "grad_norm": 1.0330515879808106, |
| "learning_rate": 8.552122301059807e-06, |
| "loss": 0.1024, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 1.0937944524544967, |
| "learning_rate": 8.535533905932739e-06, |
| "loss": 0.0979, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.5029940119760479, |
| "grad_norm": 1.0781310185766044, |
| "learning_rate": 8.518867311836808e-06, |
| "loss": 0.1194, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.5059880239520959, |
| "grad_norm": 0.9939367851701493, |
| "learning_rate": 8.502122887403882e-06, |
| "loss": 0.088, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.5089820359281437, |
| "grad_norm": 1.020133013152269, |
| "learning_rate": 8.485301002987285e-06, |
| "loss": 0.0938, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.5119760479041916, |
| "grad_norm": 1.166186467385633, |
| "learning_rate": 8.468402030653598e-06, |
| "loss": 0.1204, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.5149700598802395, |
| "grad_norm": 0.9708486555429043, |
| "learning_rate": 8.451426344174433e-06, |
| "loss": 0.1002, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.5179640718562875, |
| "grad_norm": 1.1595729468396334, |
| "learning_rate": 8.434374319018165e-06, |
| "loss": 0.1107, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.5209580838323353, |
| "grad_norm": 1.091133620520665, |
| "learning_rate": 8.417246332341638e-06, |
| "loss": 0.1061, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.5239520958083832, |
| "grad_norm": 1.1501502000269177, |
| "learning_rate": 8.4000427629818e-06, |
| "loss": 0.1127, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.5269461077844312, |
| "grad_norm": 0.9818832434869366, |
| "learning_rate": 8.382763991447344e-06, |
| "loss": 0.0985, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.5299401197604791, |
| "grad_norm": 1.285223024185485, |
| "learning_rate": 8.365410399910287e-06, |
| "loss": 0.1178, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.5329341317365269, |
| "grad_norm": 1.2746897920320701, |
| "learning_rate": 8.347982372197515e-06, |
| "loss": 0.1166, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.5359281437125748, |
| "grad_norm": 1.0945045183262572, |
| "learning_rate": 8.33048029378229e-06, |
| "loss": 0.1095, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.5389221556886228, |
| "grad_norm": 1.1594313777901462, |
| "learning_rate": 8.312904551775731e-06, |
| "loss": 0.1232, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.5419161676646707, |
| "grad_norm": 1.2108016364395127, |
| "learning_rate": 8.295255534918249e-06, |
| "loss": 0.1429, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.5449101796407185, |
| "grad_norm": 1.3055603548974295, |
| "learning_rate": 8.277533633570948e-06, |
| "loss": 0.1317, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.5479041916167665, |
| "grad_norm": 1.1438874937764232, |
| "learning_rate": 8.25973923970699e-06, |
| "loss": 0.1173, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.5508982035928144, |
| "grad_norm": 1.1476605891455036, |
| "learning_rate": 8.241872746902934e-06, |
| "loss": 0.1215, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.5538922155688623, |
| "grad_norm": 1.1394979568405286, |
| "learning_rate": 8.223934550330015e-06, |
| "loss": 0.1157, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.5568862275449101, |
| "grad_norm": 1.0943696575087467, |
| "learning_rate": 8.20592504674542e-06, |
| "loss": 0.1135, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.5598802395209581, |
| "grad_norm": 1.3105972553683807, |
| "learning_rate": 8.187844634483495e-06, |
| "loss": 0.136, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.562874251497006, |
| "grad_norm": 1.2477435850517815, |
| "learning_rate": 8.16969371344696e-06, |
| "loss": 0.118, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.5658682634730539, |
| "grad_norm": 1.0440441044548117, |
| "learning_rate": 8.151472685098037e-06, |
| "loss": 0.1186, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.5688622754491018, |
| "grad_norm": 0.9937620218088092, |
| "learning_rate": 8.13318195244958e-06, |
| "loss": 0.1061, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.5718562874251497, |
| "grad_norm": 1.0553284676258374, |
| "learning_rate": 8.114821920056177e-06, |
| "loss": 0.1086, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.5748502994011976, |
| "grad_norm": 1.2465752458529162, |
| "learning_rate": 8.096392994005177e-06, |
| "loss": 0.1213, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.5778443113772455, |
| "grad_norm": 1.0274519007784997, |
| "learning_rate": 8.077895581907719e-06, |
| "loss": 0.1034, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.5808383233532934, |
| "grad_norm": 1.130587784163485, |
| "learning_rate": 8.059330092889724e-06, |
| "loss": 0.1132, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.5838323353293413, |
| "grad_norm": 1.114546483447677, |
| "learning_rate": 8.040696937582833e-06, |
| "loss": 0.1137, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.5868263473053892, |
| "grad_norm": 0.873781031172648, |
| "learning_rate": 8.021996528115335e-06, |
| "loss": 0.0838, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.5898203592814372, |
| "grad_norm": 1.2321249744806237, |
| "learning_rate": 8.003229278103044e-06, |
| "loss": 0.1137, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.592814371257485, |
| "grad_norm": 1.1358395333676212, |
| "learning_rate": 7.984395602640153e-06, |
| "loss": 0.1179, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.5958083832335329, |
| "grad_norm": 1.0348332164450986, |
| "learning_rate": 7.96549591829006e-06, |
| "loss": 0.1058, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.5988023952095808, |
| "grad_norm": 0.9547236919983346, |
| "learning_rate": 7.946530643076138e-06, |
| "loss": 0.0921, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.5988023952095808, |
| "eval_loss": 0.12016214430332184, |
| "eval_runtime": 8.3266, |
| "eval_samples_per_second": 6.485, |
| "eval_steps_per_second": 1.681, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.6017964071856288, |
| "grad_norm": 0.9344428491844222, |
| "learning_rate": 7.927500196472506e-06, |
| "loss": 0.0988, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.6047904191616766, |
| "grad_norm": 1.0777875715216008, |
| "learning_rate": 7.908404999394747e-06, |
| "loss": 0.1019, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.6077844311377245, |
| "grad_norm": 1.237958694976887, |
| "learning_rate": 7.889245474190588e-06, |
| "loss": 0.12, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.6107784431137725, |
| "grad_norm": 1.4421618806920444, |
| "learning_rate": 7.870022044630569e-06, |
| "loss": 0.1405, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.6137724550898204, |
| "grad_norm": 1.1144924473329114, |
| "learning_rate": 7.85073513589867e-06, |
| "loss": 0.1151, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.6167664670658682, |
| "grad_norm": 1.0921040138700113, |
| "learning_rate": 7.831385174582901e-06, |
| "loss": 0.1098, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.6197604790419161, |
| "grad_norm": 1.0662690709409506, |
| "learning_rate": 7.81197258866587e-06, |
| "loss": 0.0906, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.6227544910179641, |
| "grad_norm": 1.055975543135569, |
| "learning_rate": 7.792497807515317e-06, |
| "loss": 0.1022, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.625748502994012, |
| "grad_norm": 0.972964899017892, |
| "learning_rate": 7.772961261874615e-06, |
| "loss": 0.1007, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.6287425149700598, |
| "grad_norm": 1.1345049478890776, |
| "learning_rate": 7.75336338385325e-06, |
| "loss": 0.1112, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.6317365269461078, |
| "grad_norm": 1.1918299507322672, |
| "learning_rate": 7.733704606917248e-06, |
| "loss": 0.1281, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.6347305389221557, |
| "grad_norm": 1.1044275348689654, |
| "learning_rate": 7.713985365879607e-06, |
| "loss": 0.1035, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.6377245508982036, |
| "grad_norm": 1.2196447175508784, |
| "learning_rate": 7.694206096890667e-06, |
| "loss": 0.1354, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.6407185628742516, |
| "grad_norm": 0.9858226926171154, |
| "learning_rate": 7.674367237428467e-06, |
| "loss": 0.1005, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.6437125748502994, |
| "grad_norm": 1.0313314743245496, |
| "learning_rate": 7.654469226289068e-06, |
| "loss": 0.1035, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.6467065868263473, |
| "grad_norm": 1.2073723687580094, |
| "learning_rate": 7.63451250357685e-06, |
| "loss": 0.1215, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.6497005988023952, |
| "grad_norm": 1.1575984459704936, |
| "learning_rate": 7.614497510694774e-06, |
| "loss": 0.103, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.6526946107784432, |
| "grad_norm": 1.1928147558872801, |
| "learning_rate": 7.5944246903346204e-06, |
| "loss": 0.1138, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.655688622754491, |
| "grad_norm": 0.8982426003073268, |
| "learning_rate": 7.574294486467204e-06, |
| "loss": 0.0831, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.6586826347305389, |
| "grad_norm": 0.9394252234084429, |
| "learning_rate": 7.55410734433254e-06, |
| "loss": 0.0959, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.6616766467065869, |
| "grad_norm": 1.137723030712234, |
| "learning_rate": 7.533863710430011e-06, |
| "loss": 0.1109, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.6646706586826348, |
| "grad_norm": 0.8267045625199, |
| "learning_rate": 7.513564032508484e-06, |
| "loss": 0.0842, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.6676646706586826, |
| "grad_norm": 1.1847186477673504, |
| "learning_rate": 7.493208759556406e-06, |
| "loss": 0.1151, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.6706586826347305, |
| "grad_norm": 1.0731347002786096, |
| "learning_rate": 7.472798341791877e-06, |
| "loss": 0.1087, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.6736526946107785, |
| "grad_norm": 0.9153166575730822, |
| "learning_rate": 7.452333230652688e-06, |
| "loss": 0.0784, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.6766467065868264, |
| "grad_norm": 1.6389325670271402, |
| "learning_rate": 7.431813878786343e-06, |
| "loss": 0.1158, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.6796407185628742, |
| "grad_norm": 0.94424634794741, |
| "learning_rate": 7.4112407400400395e-06, |
| "loss": 0.0857, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.6826347305389222, |
| "grad_norm": 0.89702175760304, |
| "learning_rate": 7.390614269450633e-06, |
| "loss": 0.0831, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.6856287425149701, |
| "grad_norm": 1.016468144740291, |
| "learning_rate": 7.369934923234577e-06, |
| "loss": 0.0991, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.688622754491018, |
| "grad_norm": 1.182973315725974, |
| "learning_rate": 7.349203158777826e-06, |
| "loss": 0.1118, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.6916167664670658, |
| "grad_norm": 1.0092567790353997, |
| "learning_rate": 7.32841943462572e-06, |
| "loss": 0.1024, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.6946107784431138, |
| "grad_norm": 1.1411601927497255, |
| "learning_rate": 7.3075842104728445e-06, |
| "loss": 0.1146, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.6976047904191617, |
| "grad_norm": 1.1078945128986997, |
| "learning_rate": 7.286697947152868e-06, |
| "loss": 0.1164, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.7005988023952096, |
| "grad_norm": 0.8662144303520954, |
| "learning_rate": 7.265761106628338e-06, |
| "loss": 0.0829, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.7035928143712575, |
| "grad_norm": 0.9752397406169193, |
| "learning_rate": 7.244774151980466e-06, |
| "loss": 0.0943, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.7065868263473054, |
| "grad_norm": 1.1117357986281629, |
| "learning_rate": 7.223737547398898e-06, |
| "loss": 0.1087, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.7095808383233533, |
| "grad_norm": 1.0339611771677366, |
| "learning_rate": 7.20265175817143e-06, |
| "loss": 0.1092, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.7125748502994012, |
| "grad_norm": 1.3500709005637261, |
| "learning_rate": 7.181517250673729e-06, |
| "loss": 0.1507, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.7155688622754491, |
| "grad_norm": 1.1511251305711105, |
| "learning_rate": 7.1603344923590065e-06, |
| "loss": 0.1098, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.718562874251497, |
| "grad_norm": 1.2192955758731912, |
| "learning_rate": 7.139103951747694e-06, |
| "loss": 0.1272, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.7215568862275449, |
| "grad_norm": 1.0776006747271616, |
| "learning_rate": 7.1178260984170675e-06, |
| "loss": 0.1067, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.7245508982035929, |
| "grad_norm": 0.9530867829023002, |
| "learning_rate": 7.0965014029908654e-06, |
| "loss": 0.0968, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.7275449101796407, |
| "grad_norm": 1.1096466841531403, |
| "learning_rate": 7.075130337128883e-06, |
| "loss": 0.1299, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.7305389221556886, |
| "grad_norm": 1.2165381641728317, |
| "learning_rate": 7.053713373516538e-06, |
| "loss": 0.1349, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.7335329341317365, |
| "grad_norm": 1.1146967815470412, |
| "learning_rate": 7.03225098585441e-06, |
| "loss": 0.1135, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.7365269461077845, |
| "grad_norm": 1.1148754947833899, |
| "learning_rate": 7.0107436488477694e-06, |
| "loss": 0.1266, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.7395209580838323, |
| "grad_norm": 1.1161083256305881, |
| "learning_rate": 6.989191838196083e-06, |
| "loss": 0.1194, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.7425149700598802, |
| "grad_norm": 0.9469562698402685, |
| "learning_rate": 6.9675960305824785e-06, |
| "loss": 0.099, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.7455089820359282, |
| "grad_norm": 1.151194749997882, |
| "learning_rate": 6.945956703663212e-06, |
| "loss": 0.1153, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.7485029940119761, |
| "grad_norm": 1.0882092511428272, |
| "learning_rate": 6.9242743360570985e-06, |
| "loss": 0.1078, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.7514970059880239, |
| "grad_norm": 1.028388896286411, |
| "learning_rate": 6.9025494073349284e-06, |
| "loss": 0.1096, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.7544910179640718, |
| "grad_norm": 0.9972504617324368, |
| "learning_rate": 6.880782398008862e-06, |
| "loss": 0.1087, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.7574850299401198, |
| "grad_norm": 0.9256438231559054, |
| "learning_rate": 6.858973789521792e-06, |
| "loss": 0.0922, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.7604790419161677, |
| "grad_norm": 1.0098118238043352, |
| "learning_rate": 6.837124064236709e-06, |
| "loss": 0.0967, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.7634730538922155, |
| "grad_norm": 0.9171644357934685, |
| "learning_rate": 6.815233705426019e-06, |
| "loss": 0.095, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.7664670658682635, |
| "grad_norm": 1.0311629178083461, |
| "learning_rate": 6.7933031972608644e-06, |
| "loss": 0.1009, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.7694610778443114, |
| "grad_norm": 1.2065768311047647, |
| "learning_rate": 6.771333024800411e-06, |
| "loss": 0.1127, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.7724550898203593, |
| "grad_norm": 1.1649667504142824, |
| "learning_rate": 6.74932367398112e-06, |
| "loss": 0.112, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.7754491017964071, |
| "grad_norm": 0.9308015487490596, |
| "learning_rate": 6.727275631605996e-06, |
| "loss": 0.0889, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.7784431137724551, |
| "grad_norm": 1.2121108383798198, |
| "learning_rate": 6.70518938533383e-06, |
| "loss": 0.1105, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.781437125748503, |
| "grad_norm": 0.9872567997673177, |
| "learning_rate": 6.683065423668403e-06, |
| "loss": 0.091, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.7844311377245509, |
| "grad_norm": 1.1434398705082174, |
| "learning_rate": 6.660904235947687e-06, |
| "loss": 0.1084, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.7874251497005988, |
| "grad_norm": 1.1135729967157757, |
| "learning_rate": 6.638706312333018e-06, |
| "loss": 0.1036, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.7904191616766467, |
| "grad_norm": 1.2172285636766491, |
| "learning_rate": 6.61647214379826e-06, |
| "loss": 0.1184, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.7934131736526946, |
| "grad_norm": 1.3488738740905057, |
| "learning_rate": 6.594202222118941e-06, |
| "loss": 0.1222, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.7964071856287425, |
| "grad_norm": 1.0269053596938786, |
| "learning_rate": 6.571897039861377e-06, |
| "loss": 0.1028, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.7994011976047904, |
| "grad_norm": 1.1129746486220893, |
| "learning_rate": 6.549557090371775e-06, |
| "loss": 0.1154, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.8023952095808383, |
| "grad_norm": 0.842333383910561, |
| "learning_rate": 6.527182867765333e-06, |
| "loss": 0.0771, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.8053892215568862, |
| "grad_norm": 1.3078259478030483, |
| "learning_rate": 6.504774866915291e-06, |
| "loss": 0.13, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.8083832335329342, |
| "grad_norm": 1.0001135093091333, |
| "learning_rate": 6.482333583442002e-06, |
| "loss": 0.0997, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.811377245508982, |
| "grad_norm": 1.1205663397366659, |
| "learning_rate": 6.459859513701967e-06, |
| "loss": 0.1063, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.8143712574850299, |
| "grad_norm": 1.2779493257817607, |
| "learning_rate": 6.437353154776848e-06, |
| "loss": 0.1157, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.8173652694610778, |
| "grad_norm": 1.0352680525112001, |
| "learning_rate": 6.414815004462483e-06, |
| "loss": 0.0886, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.8203592814371258, |
| "grad_norm": 1.0436596456460765, |
| "learning_rate": 6.3922455612578715e-06, |
| "loss": 0.113, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.8233532934131736, |
| "grad_norm": 0.9993307292108748, |
| "learning_rate": 6.369645324354149e-06, |
| "loss": 0.0914, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.8263473053892215, |
| "grad_norm": 1.2497821802624005, |
| "learning_rate": 6.3470147936235485e-06, |
| "loss": 0.13, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.8293413173652695, |
| "grad_norm": 0.8174766850501531, |
| "learning_rate": 6.3243544696083355e-06, |
| "loss": 0.0876, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.8323353293413174, |
| "grad_norm": 1.1314296946186437, |
| "learning_rate": 6.301664853509755e-06, |
| "loss": 0.1181, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.8353293413173652, |
| "grad_norm": 1.0004348069946, |
| "learning_rate": 6.278946447176924e-06, |
| "loss": 0.09, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.8383233532934131, |
| "grad_norm": 1.0671378195782413, |
| "learning_rate": 6.256199753095745e-06, |
| "loss": 0.1056, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.8413173652694611, |
| "grad_norm": 0.8364701680716429, |
| "learning_rate": 6.233425274377793e-06, |
| "loss": 0.0824, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.844311377245509, |
| "grad_norm": 1.1223141953145617, |
| "learning_rate": 6.21062351474918e-06, |
| "loss": 0.1032, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.8473053892215568, |
| "grad_norm": 1.0524119154471643, |
| "learning_rate": 6.18779497853942e-06, |
| "loss": 0.1081, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.8502994011976048, |
| "grad_norm": 1.0264788336701436, |
| "learning_rate": 6.164940170670266e-06, |
| "loss": 0.1069, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.8532934131736527, |
| "grad_norm": 0.981192757287711, |
| "learning_rate": 6.142059596644557e-06, |
| "loss": 0.0894, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.8562874251497006, |
| "grad_norm": 0.9615230917131988, |
| "learning_rate": 6.11915376253502e-06, |
| "loss": 0.0897, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.8592814371257484, |
| "grad_norm": 1.0337209438384687, |
| "learning_rate": 6.096223174973091e-06, |
| "loss": 0.1006, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.8622754491017964, |
| "grad_norm": 0.9905876169014615, |
| "learning_rate": 6.073268341137694e-06, |
| "loss": 0.095, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.8652694610778443, |
| "grad_norm": 1.1027961277676517, |
| "learning_rate": 6.050289768744042e-06, |
| "loss": 0.0998, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.8682634730538922, |
| "grad_norm": 1.083233411983881, |
| "learning_rate": 6.0272879660323936e-06, |
| "loss": 0.1096, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.8712574850299402, |
| "grad_norm": 1.0913128739322395, |
| "learning_rate": 6.004263441756815e-06, |
| "loss": 0.1059, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.874251497005988, |
| "grad_norm": 1.0909830526364266, |
| "learning_rate": 5.98121670517393e-06, |
| "loss": 0.1078, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.8772455089820359, |
| "grad_norm": 0.8883708109490787, |
| "learning_rate": 5.958148266031654e-06, |
| "loss": 0.0961, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.8802395209580839, |
| "grad_norm": 1.0038270303162207, |
| "learning_rate": 5.935058634557917e-06, |
| "loss": 0.0939, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.8832335329341318, |
| "grad_norm": 0.9312159317082542, |
| "learning_rate": 5.911948321449384e-06, |
| "loss": 0.0924, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.8862275449101796, |
| "grad_norm": 1.1127876140747484, |
| "learning_rate": 5.8888178378601565e-06, |
| "loss": 0.1104, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.8892215568862275, |
| "grad_norm": 1.0620371276229927, |
| "learning_rate": 5.865667695390468e-06, |
| "loss": 0.0993, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.8922155688622755, |
| "grad_norm": 0.8791295012960114, |
| "learning_rate": 5.842498406075363e-06, |
| "loss": 0.0843, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.8952095808383234, |
| "grad_norm": 0.9421170351009726, |
| "learning_rate": 5.819310482373381e-06, |
| "loss": 0.0815, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.8982035928143712, |
| "grad_norm": 1.1078125505045537, |
| "learning_rate": 5.796104437155213e-06, |
| "loss": 0.1117, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.9011976047904192, |
| "grad_norm": 0.9854231317801608, |
| "learning_rate": 5.772880783692363e-06, |
| "loss": 0.0887, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.9041916167664671, |
| "grad_norm": 1.1279967736862624, |
| "learning_rate": 5.749640035645798e-06, |
| "loss": 0.1063, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.907185628742515, |
| "grad_norm": 0.9960776335710316, |
| "learning_rate": 5.726382707054578e-06, |
| "loss": 0.0921, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.9101796407185628, |
| "grad_norm": 1.064755038701587, |
| "learning_rate": 5.703109312324493e-06, |
| "loss": 0.0961, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.9131736526946108, |
| "grad_norm": 1.1225010610453825, |
| "learning_rate": 5.679820366216684e-06, |
| "loss": 0.1181, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.9161676646706587, |
| "grad_norm": 0.9015413967576371, |
| "learning_rate": 5.656516383836263e-06, |
| "loss": 0.0787, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.9191616766467066, |
| "grad_norm": 1.1259253163412393, |
| "learning_rate": 5.6331978806209044e-06, |
| "loss": 0.1139, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.9221556886227545, |
| "grad_norm": 1.0731440571023292, |
| "learning_rate": 5.609865372329461e-06, |
| "loss": 0.0968, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.9251497005988024, |
| "grad_norm": 1.0478169134439905, |
| "learning_rate": 5.586519375030549e-06, |
| "loss": 0.1082, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.9281437125748503, |
| "grad_norm": 1.0407654799348711, |
| "learning_rate": 5.5631604050911354e-06, |
| "loss": 0.0989, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.9311377245508982, |
| "grad_norm": 1.0508551890563849, |
| "learning_rate": 5.539788979165115e-06, |
| "loss": 0.1013, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.9341317365269461, |
| "grad_norm": 0.923954532579174, |
| "learning_rate": 5.516405614181883e-06, |
| "loss": 0.0973, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.937125748502994, |
| "grad_norm": 0.8966214747374445, |
| "learning_rate": 5.4930108273349034e-06, |
| "loss": 0.0842, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.9401197604790419, |
| "grad_norm": 1.0545356732072688, |
| "learning_rate": 5.4696051360702725e-06, |
| "loss": 0.1036, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.9431137724550899, |
| "grad_norm": 1.2592838120797554, |
| "learning_rate": 5.446189058075265e-06, |
| "loss": 0.1175, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.9461077844311377, |
| "grad_norm": 1.147053941416306, |
| "learning_rate": 5.4227631112668955e-06, |
| "loss": 0.1093, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.9491017964071856, |
| "grad_norm": 0.9646520479241986, |
| "learning_rate": 5.39932781378045e-06, |
| "loss": 0.0841, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.9520958083832335, |
| "grad_norm": 1.0575832107417094, |
| "learning_rate": 5.375883683958041e-06, |
| "loss": 0.1116, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.9550898203592815, |
| "grad_norm": 0.9810315231097588, |
| "learning_rate": 5.3524312403371255e-06, |
| "loss": 0.0939, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.9580838323353293, |
| "grad_norm": 1.0279268894390523, |
| "learning_rate": 5.328971001639054e-06, |
| "loss": 0.0998, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.9610778443113772, |
| "grad_norm": 1.136417758736507, |
| "learning_rate": 5.3055034867575825e-06, |
| "loss": 0.1095, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.9640718562874252, |
| "grad_norm": 1.153488132682876, |
| "learning_rate": 5.282029214747404e-06, |
| "loss": 0.1081, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.9670658682634731, |
| "grad_norm": 1.2048728627853904, |
| "learning_rate": 5.258548704812667e-06, |
| "loss": 0.111, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.9700598802395209, |
| "grad_norm": 1.1484151553985187, |
| "learning_rate": 5.235062476295488e-06, |
| "loss": 0.1125, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.9730538922155688, |
| "grad_norm": 1.1343182106364535, |
| "learning_rate": 5.211571048664469e-06, |
| "loss": 0.093, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.9760479041916168, |
| "grad_norm": 0.9845672359142281, |
| "learning_rate": 5.188074941503203e-06, |
| "loss": 0.1015, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.9790419161676647, |
| "grad_norm": 1.165160638091069, |
| "learning_rate": 5.164574674498788e-06, |
| "loss": 0.1028, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.9820359281437125, |
| "grad_norm": 1.0164063834652908, |
| "learning_rate": 5.141070767430331e-06, |
| "loss": 0.0986, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.9850299401197605, |
| "grad_norm": 1.1404188476330233, |
| "learning_rate": 5.117563740157444e-06, |
| "loss": 0.1035, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.9880239520958084, |
| "grad_norm": 1.1740883832722306, |
| "learning_rate": 5.094054112608758e-06, |
| "loss": 0.1081, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.9910179640718563, |
| "grad_norm": 1.0738025982568447, |
| "learning_rate": 5.070542404770413e-06, |
| "loss": 0.1061, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.9940119760479041, |
| "grad_norm": 0.926438169582587, |
| "learning_rate": 5.047029136674563e-06, |
| "loss": 0.088, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.9970059880239521, |
| "grad_norm": 1.418334686291164, |
| "learning_rate": 5.023514828387868e-06, |
| "loss": 0.1334, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.9103765676053909, |
| "learning_rate": 5e-06, |
| "loss": 0.0752, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.0029940119760479, |
| "grad_norm": 0.7920123120809723, |
| "learning_rate": 4.976485171612134e-06, |
| "loss": 0.0498, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.0059880239520957, |
| "grad_norm": 0.7645844304656827, |
| "learning_rate": 4.95297086332544e-06, |
| "loss": 0.049, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.0089820359281436, |
| "grad_norm": 0.6769348281909342, |
| "learning_rate": 4.9294575952295896e-06, |
| "loss": 0.0428, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.0119760479041917, |
| "grad_norm": 0.7200135230361624, |
| "learning_rate": 4.905945887391242e-06, |
| "loss": 0.0456, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.0149700598802396, |
| "grad_norm": 0.6729442176900161, |
| "learning_rate": 4.882436259842556e-06, |
| "loss": 0.0373, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.0179640718562875, |
| "grad_norm": 0.6971669698075075, |
| "learning_rate": 4.858929232569671e-06, |
| "loss": 0.0422, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.0209580838323353, |
| "grad_norm": 0.7495540742531779, |
| "learning_rate": 4.835425325501214e-06, |
| "loss": 0.0498, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.0239520958083832, |
| "grad_norm": 0.6366168809607379, |
| "learning_rate": 4.811925058496799e-06, |
| "loss": 0.0401, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.026946107784431, |
| "grad_norm": 0.7222098169912883, |
| "learning_rate": 4.788428951335534e-06, |
| "loss": 0.035, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.029940119760479, |
| "grad_norm": 0.7836392148736803, |
| "learning_rate": 4.7649375237045135e-06, |
| "loss": 0.044, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.032934131736527, |
| "grad_norm": 0.5574391641162871, |
| "learning_rate": 4.741451295187333e-06, |
| "loss": 0.0264, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.035928143712575, |
| "grad_norm": 0.7628970798704946, |
| "learning_rate": 4.717970785252596e-06, |
| "loss": 0.0415, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.0389221556886228, |
| "grad_norm": 0.8667402571860164, |
| "learning_rate": 4.694496513242418e-06, |
| "loss": 0.0375, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.0419161676646707, |
| "grad_norm": 0.6670803258208319, |
| "learning_rate": 4.671028998360947e-06, |
| "loss": 0.0288, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.0449101796407185, |
| "grad_norm": 0.6991849065877793, |
| "learning_rate": 4.647568759662876e-06, |
| "loss": 0.0341, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.0479041916167664, |
| "grad_norm": 0.9326177137319185, |
| "learning_rate": 4.624116316041962e-06, |
| "loss": 0.0427, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.0508982035928143, |
| "grad_norm": 0.7317921993459834, |
| "learning_rate": 4.600672186219551e-06, |
| "loss": 0.0279, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.0538922155688624, |
| "grad_norm": 0.936307056378493, |
| "learning_rate": 4.5772368887331044e-06, |
| "loss": 0.0333, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.0568862275449102, |
| "grad_norm": 0.9429289119624542, |
| "learning_rate": 4.553810941924735e-06, |
| "loss": 0.044, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.0598802395209581, |
| "grad_norm": 0.929022486839685, |
| "learning_rate": 4.530394863929728e-06, |
| "loss": 0.0404, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.062874251497006, |
| "grad_norm": 0.920787814386675, |
| "learning_rate": 4.506989172665097e-06, |
| "loss": 0.0411, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.0658682634730539, |
| "grad_norm": 0.9822944109810009, |
| "learning_rate": 4.483594385818119e-06, |
| "loss": 0.0411, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.0688622754491017, |
| "grad_norm": 1.0776428732380365, |
| "learning_rate": 4.460211020834887e-06, |
| "loss": 0.0443, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.0718562874251496, |
| "grad_norm": 0.9242680121191666, |
| "learning_rate": 4.436839594908866e-06, |
| "loss": 0.037, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.0748502994011977, |
| "grad_norm": 1.102736384384949, |
| "learning_rate": 4.4134806249694514e-06, |
| "loss": 0.0314, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.0778443113772456, |
| "grad_norm": 0.9586978115919587, |
| "learning_rate": 4.39013462767054e-06, |
| "loss": 0.0379, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.0808383233532934, |
| "grad_norm": 0.8744433388997015, |
| "learning_rate": 4.366802119379098e-06, |
| "loss": 0.0407, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.0838323353293413, |
| "grad_norm": 0.8245451265781271, |
| "learning_rate": 4.34348361616374e-06, |
| "loss": 0.034, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.0868263473053892, |
| "grad_norm": 1.1068030098172257, |
| "learning_rate": 4.3201796337833165e-06, |
| "loss": 0.0416, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.089820359281437, |
| "grad_norm": 0.7777816793906646, |
| "learning_rate": 4.29689068767551e-06, |
| "loss": 0.0323, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.092814371257485, |
| "grad_norm": 1.0250067844793325, |
| "learning_rate": 4.273617292945425e-06, |
| "loss": 0.0513, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.095808383233533, |
| "grad_norm": 0.845654890529388, |
| "learning_rate": 4.250359964354203e-06, |
| "loss": 0.0374, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.098802395209581, |
| "grad_norm": 0.8964258556108368, |
| "learning_rate": 4.227119216307637e-06, |
| "loss": 0.0367, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.1017964071856288, |
| "grad_norm": 0.7868435665620686, |
| "learning_rate": 4.203895562844789e-06, |
| "loss": 0.0336, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.1047904191616766, |
| "grad_norm": 0.8457116061447782, |
| "learning_rate": 4.18068951762662e-06, |
| "loss": 0.0293, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.1077844311377245, |
| "grad_norm": 0.7530274777405538, |
| "learning_rate": 4.157501593924638e-06, |
| "loss": 0.0367, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.1107784431137724, |
| "grad_norm": 0.9776857778183234, |
| "learning_rate": 4.134332304609533e-06, |
| "loss": 0.0443, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.1137724550898203, |
| "grad_norm": 0.6793113738947543, |
| "learning_rate": 4.111182162139844e-06, |
| "loss": 0.0319, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.1167664670658684, |
| "grad_norm": 0.7881838904063722, |
| "learning_rate": 4.088051678550617e-06, |
| "loss": 0.0315, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.1197604790419162, |
| "grad_norm": 0.862318278207734, |
| "learning_rate": 4.064941365442084e-06, |
| "loss": 0.0371, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.122754491017964, |
| "grad_norm": 1.0744855612366662, |
| "learning_rate": 4.041851733968348e-06, |
| "loss": 0.0513, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.125748502994012, |
| "grad_norm": 0.914922903066341, |
| "learning_rate": 4.018783294826071e-06, |
| "loss": 0.042, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.1287425149700598, |
| "grad_norm": 0.933269550307153, |
| "learning_rate": 3.995736558243186e-06, |
| "loss": 0.0392, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.1317365269461077, |
| "grad_norm": 0.9029916694162012, |
| "learning_rate": 3.972712033967608e-06, |
| "loss": 0.0465, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.1347305389221556, |
| "grad_norm": 0.859954495431855, |
| "learning_rate": 3.949710231255961e-06, |
| "loss": 0.0484, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.1377245508982037, |
| "grad_norm": 0.7531777410274714, |
| "learning_rate": 3.926731658862307e-06, |
| "loss": 0.0329, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.1407185628742516, |
| "grad_norm": 0.7087909296607872, |
| "learning_rate": 3.903776825026912e-06, |
| "loss": 0.0337, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.1437125748502994, |
| "grad_norm": 0.7330388221118903, |
| "learning_rate": 3.8808462374649805e-06, |
| "loss": 0.0372, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.1467065868263473, |
| "grad_norm": 0.7368243151394032, |
| "learning_rate": 3.857940403355444e-06, |
| "loss": 0.0373, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.1497005988023952, |
| "grad_norm": 0.727158340191875, |
| "learning_rate": 3.8350598293297345e-06, |
| "loss": 0.0371, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.152694610778443, |
| "grad_norm": 0.9195286471944959, |
| "learning_rate": 3.8122050214605822e-06, |
| "loss": 0.0421, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.1556886227544911, |
| "grad_norm": 0.8716150110798513, |
| "learning_rate": 3.7893764852508207e-06, |
| "loss": 0.0431, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.158682634730539, |
| "grad_norm": 0.7216158638005744, |
| "learning_rate": 3.766574725622208e-06, |
| "loss": 0.0307, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.1616766467065869, |
| "grad_norm": 0.7001316533601489, |
| "learning_rate": 3.7438002469042567e-06, |
| "loss": 0.0287, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.1646706586826348, |
| "grad_norm": 0.7904658156314911, |
| "learning_rate": 3.721053552823078e-06, |
| "loss": 0.0335, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.1676646706586826, |
| "grad_norm": 0.7175422431712003, |
| "learning_rate": 3.698335146490246e-06, |
| "loss": 0.0324, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.1706586826347305, |
| "grad_norm": 0.7842272991353707, |
| "learning_rate": 3.675645530391665e-06, |
| "loss": 0.0294, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.1736526946107784, |
| "grad_norm": 0.961378079601426, |
| "learning_rate": 3.652985206376455e-06, |
| "loss": 0.0361, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.1766467065868262, |
| "grad_norm": 0.9744359586687764, |
| "learning_rate": 3.630354675645853e-06, |
| "loss": 0.0412, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.1796407185628743, |
| "grad_norm": 0.8939009027709087, |
| "learning_rate": 3.6077544387421293e-06, |
| "loss": 0.0377, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.1826347305389222, |
| "grad_norm": 0.6647428987195185, |
| "learning_rate": 3.5851849955375177e-06, |
| "loss": 0.0297, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.18562874251497, |
| "grad_norm": 0.7347697572783929, |
| "learning_rate": 3.5626468452231534e-06, |
| "loss": 0.0272, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.188622754491018, |
| "grad_norm": 0.8121052793901488, |
| "learning_rate": 3.540140486298035e-06, |
| "loss": 0.0326, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.1916167664670658, |
| "grad_norm": 0.9567500286690479, |
| "learning_rate": 3.517666416557999e-06, |
| "loss": 0.0451, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.1946107784431137, |
| "grad_norm": 0.661783630411826, |
| "learning_rate": 3.495225133084712e-06, |
| "loss": 0.0252, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.1976047904191618, |
| "grad_norm": 0.6375626484149183, |
| "learning_rate": 3.472817132234669e-06, |
| "loss": 0.0223, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.1976047904191618, |
| "eval_loss": 0.12274425476789474, |
| "eval_runtime": 8.3056, |
| "eval_samples_per_second": 6.502, |
| "eval_steps_per_second": 1.686, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.2005988023952097, |
| "grad_norm": 0.8482845892477424, |
| "learning_rate": 3.4504429096282246e-06, |
| "loss": 0.0356, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.2035928143712575, |
| "grad_norm": 0.8532014024276183, |
| "learning_rate": 3.428102960138625e-06, |
| "loss": 0.0387, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.2065868263473054, |
| "grad_norm": 0.95367515220183, |
| "learning_rate": 3.405797777881059e-06, |
| "loss": 0.0432, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.2095808383233533, |
| "grad_norm": 0.7228544990492038, |
| "learning_rate": 3.3835278562017405e-06, |
| "loss": 0.028, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.2125748502994012, |
| "grad_norm": 0.8264683521593323, |
| "learning_rate": 3.3612936876669834e-06, |
| "loss": 0.0376, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.215568862275449, |
| "grad_norm": 0.8962344956030094, |
| "learning_rate": 3.3390957640523147e-06, |
| "loss": 0.0397, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.218562874251497, |
| "grad_norm": 0.7516331406177484, |
| "learning_rate": 3.3169345763315986e-06, |
| "loss": 0.0298, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.221556886227545, |
| "grad_norm": 0.8997824255484812, |
| "learning_rate": 3.29481061466617e-06, |
| "loss": 0.043, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.2245508982035929, |
| "grad_norm": 0.9155261698983177, |
| "learning_rate": 3.2727243683940045e-06, |
| "loss": 0.0342, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.2275449101796407, |
| "grad_norm": 0.7441360133603632, |
| "learning_rate": 3.2506763260188824e-06, |
| "loss": 0.0299, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.2305389221556886, |
| "grad_norm": 0.6227423061521378, |
| "learning_rate": 3.2286669751995905e-06, |
| "loss": 0.0272, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.2335329341317365, |
| "grad_norm": 0.8789385458345906, |
| "learning_rate": 3.2066968027391377e-06, |
| "loss": 0.0317, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.2365269461077844, |
| "grad_norm": 0.6498772967966782, |
| "learning_rate": 3.1847662945739833e-06, |
| "loss": 0.0251, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.2395209580838324, |
| "grad_norm": 0.6806058535848892, |
| "learning_rate": 3.1628759357632943e-06, |
| "loss": 0.0272, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.2425149700598803, |
| "grad_norm": 0.7405883946592593, |
| "learning_rate": 3.1410262104782086e-06, |
| "loss": 0.0312, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.2455089820359282, |
| "grad_norm": 0.8289539441670357, |
| "learning_rate": 3.119217601991139e-06, |
| "loss": 0.0326, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.248502994011976, |
| "grad_norm": 0.8212534436158723, |
| "learning_rate": 3.0974505926650724e-06, |
| "loss": 0.0374, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.251497005988024, |
| "grad_norm": 0.9186883114815179, |
| "learning_rate": 3.0757256639429027e-06, |
| "loss": 0.0477, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.2544910179640718, |
| "grad_norm": 0.7857164567317839, |
| "learning_rate": 3.0540432963367907e-06, |
| "loss": 0.0338, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.2574850299401197, |
| "grad_norm": 0.7416241333181448, |
| "learning_rate": 3.032403969417523e-06, |
| "loss": 0.0295, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.2604790419161676, |
| "grad_norm": 0.6296875419732539, |
| "learning_rate": 3.010808161803917e-06, |
| "loss": 0.0279, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.2634730538922156, |
| "grad_norm": 0.858388360398613, |
| "learning_rate": 2.9892563511522305e-06, |
| "loss": 0.0411, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.2664670658682635, |
| "grad_norm": 0.7311365145504828, |
| "learning_rate": 2.9677490141455915e-06, |
| "loss": 0.0289, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.2694610778443114, |
| "grad_norm": 0.8611729516176496, |
| "learning_rate": 2.946286626483463e-06, |
| "loss": 0.0369, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.2724550898203593, |
| "grad_norm": 0.8638938444837622, |
| "learning_rate": 2.924869662871117e-06, |
| "loss": 0.0331, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.2754491017964071, |
| "grad_norm": 0.7477554937360593, |
| "learning_rate": 2.903498597009136e-06, |
| "loss": 0.0363, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.278443113772455, |
| "grad_norm": 0.7671241329830223, |
| "learning_rate": 2.8821739015829338e-06, |
| "loss": 0.0333, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.281437125748503, |
| "grad_norm": 0.6121159777799046, |
| "learning_rate": 2.8608960482523058e-06, |
| "loss": 0.0242, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.284431137724551, |
| "grad_norm": 0.7573160710717566, |
| "learning_rate": 2.839665507640992e-06, |
| "loss": 0.0328, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.2874251497005988, |
| "grad_norm": 0.7164317510192734, |
| "learning_rate": 2.818482749326272e-06, |
| "loss": 0.0305, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.2904191616766467, |
| "grad_norm": 0.8103144727787737, |
| "learning_rate": 2.797348241828569e-06, |
| "loss": 0.0292, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.2934131736526946, |
| "grad_norm": 0.749983408990304, |
| "learning_rate": 2.776262452601104e-06, |
| "loss": 0.0243, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.2964071856287425, |
| "grad_norm": 0.7860892667640146, |
| "learning_rate": 2.7552258480195348e-06, |
| "loss": 0.0316, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.2994011976047903, |
| "grad_norm": 0.6696413883450865, |
| "learning_rate": 2.734238893371667e-06, |
| "loss": 0.0257, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.3023952095808382, |
| "grad_norm": 0.8456737952611747, |
| "learning_rate": 2.7133020528471322e-06, |
| "loss": 0.0363, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.3053892215568863, |
| "grad_norm": 0.7666486409896474, |
| "learning_rate": 2.6924157895271563e-06, |
| "loss": 0.0334, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.3083832335329342, |
| "grad_norm": 0.8264949977798605, |
| "learning_rate": 2.671580565374282e-06, |
| "loss": 0.0378, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.311377245508982, |
| "grad_norm": 0.8727137273398754, |
| "learning_rate": 2.6507968412221763e-06, |
| "loss": 0.0403, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.31437125748503, |
| "grad_norm": 0.8345367260510321, |
| "learning_rate": 2.6300650767654234e-06, |
| "loss": 0.031, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.3173652694610778, |
| "grad_norm": 0.8998558702359915, |
| "learning_rate": 2.6093857305493666e-06, |
| "loss": 0.0409, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.3203592814371259, |
| "grad_norm": 0.7572176445110357, |
| "learning_rate": 2.588759259959962e-06, |
| "loss": 0.0289, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.3233532934131738, |
| "grad_norm": 0.8788703593742704, |
| "learning_rate": 2.568186121213658e-06, |
| "loss": 0.0358, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.3263473053892216, |
| "grad_norm": 0.8886184474502267, |
| "learning_rate": 2.547666769347312e-06, |
| "loss": 0.0333, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.3293413173652695, |
| "grad_norm": 0.8850229903761628, |
| "learning_rate": 2.5272016582081236e-06, |
| "loss": 0.0321, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.3323353293413174, |
| "grad_norm": 0.8534261499675657, |
| "learning_rate": 2.5067912404435952e-06, |
| "loss": 0.0357, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.3353293413173652, |
| "grad_norm": 0.967076925657212, |
| "learning_rate": 2.486435967491516e-06, |
| "loss": 0.041, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.3383233532934131, |
| "grad_norm": 0.86808042150452, |
| "learning_rate": 2.4661362895699903e-06, |
| "loss": 0.0356, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.341317365269461, |
| "grad_norm": 0.9139637793505679, |
| "learning_rate": 2.445892655667462e-06, |
| "loss": 0.0368, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.3443113772455089, |
| "grad_norm": 0.9330373787227488, |
| "learning_rate": 2.425705513532798e-06, |
| "loss": 0.0438, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.347305389221557, |
| "grad_norm": 0.8994773466836672, |
| "learning_rate": 2.4055753096653795e-06, |
| "loss": 0.0445, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.3502994011976048, |
| "grad_norm": 1.0517765784734638, |
| "learning_rate": 2.3855024893052286e-06, |
| "loss": 0.0429, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.3532934131736527, |
| "grad_norm": 0.8178120057042947, |
| "learning_rate": 2.365487496423152e-06, |
| "loss": 0.035, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.3562874251497006, |
| "grad_norm": 0.8039252618164829, |
| "learning_rate": 2.3455307737109338e-06, |
| "loss": 0.0293, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.3592814371257484, |
| "grad_norm": 0.8098474809654538, |
| "learning_rate": 2.3256327625715345e-06, |
| "loss": 0.038, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.3622754491017965, |
| "grad_norm": 0.6834159216563565, |
| "learning_rate": 2.3057939031093346e-06, |
| "loss": 0.0283, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.3652694610778444, |
| "grad_norm": 0.6657612928843127, |
| "learning_rate": 2.2860146341203936e-06, |
| "loss": 0.0287, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.3682634730538923, |
| "grad_norm": 0.8131771805140413, |
| "learning_rate": 2.2662953930827546e-06, |
| "loss": 0.0387, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.3712574850299402, |
| "grad_norm": 0.6817770464408984, |
| "learning_rate": 2.2466366161467528e-06, |
| "loss": 0.0252, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.374251497005988, |
| "grad_norm": 0.7793573256649142, |
| "learning_rate": 2.227038738125385e-06, |
| "loss": 0.0307, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.377245508982036, |
| "grad_norm": 1.0261060760433192, |
| "learning_rate": 2.207502192484685e-06, |
| "loss": 0.0383, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.3802395209580838, |
| "grad_norm": 0.6348635641826605, |
| "learning_rate": 2.188027411334131e-06, |
| "loss": 0.0228, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.3832335329341316, |
| "grad_norm": 0.8573130652179038, |
| "learning_rate": 2.1686148254171012e-06, |
| "loss": 0.0423, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.3862275449101795, |
| "grad_norm": 0.7806582324146677, |
| "learning_rate": 2.1492648641013305e-06, |
| "loss": 0.0316, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.3892215568862276, |
| "grad_norm": 0.5894221825824172, |
| "learning_rate": 2.1299779553694323e-06, |
| "loss": 0.023, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.3922155688622755, |
| "grad_norm": 0.826535206087488, |
| "learning_rate": 2.1107545258094135e-06, |
| "loss": 0.0366, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.3952095808383234, |
| "grad_norm": 0.9616742775987399, |
| "learning_rate": 2.0915950006052555e-06, |
| "loss": 0.0301, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.3982035928143712, |
| "grad_norm": 0.729862632476751, |
| "learning_rate": 2.0724998035274947e-06, |
| "loss": 0.0267, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.401197604790419, |
| "grad_norm": 0.6637369110872577, |
| "learning_rate": 2.053469356923865e-06, |
| "loss": 0.0263, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.4041916167664672, |
| "grad_norm": 0.6619961912456589, |
| "learning_rate": 2.0345040817099433e-06, |
| "loss": 0.023, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.407185628742515, |
| "grad_norm": 0.729297579432835, |
| "learning_rate": 2.0156043973598475e-06, |
| "loss": 0.0301, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.410179640718563, |
| "grad_norm": 0.7886765442748123, |
| "learning_rate": 1.996770721896957e-06, |
| "loss": 0.0257, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.4131736526946108, |
| "grad_norm": 0.6330705774442724, |
| "learning_rate": 1.9780034718846653e-06, |
| "loss": 0.0223, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.4161676646706587, |
| "grad_norm": 0.9521481226689832, |
| "learning_rate": 1.9593030624171683e-06, |
| "loss": 0.0412, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.4191616766467066, |
| "grad_norm": 0.8013957788507334, |
| "learning_rate": 1.9406699071102774e-06, |
| "loss": 0.0349, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.4221556886227544, |
| "grad_norm": 0.611011430738466, |
| "learning_rate": 1.9221044180922833e-06, |
| "loss": 0.0225, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.4251497005988023, |
| "grad_norm": 0.847951402483942, |
| "learning_rate": 1.9036070059948253e-06, |
| "loss": 0.0337, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.4281437125748502, |
| "grad_norm": 0.914452142873737, |
| "learning_rate": 1.885178079943823e-06, |
| "loss": 0.0364, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.4311377245508983, |
| "grad_norm": 0.9317726028752528, |
| "learning_rate": 1.866818047550419e-06, |
| "loss": 0.0399, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.4341317365269461, |
| "grad_norm": 0.6902701669651811, |
| "learning_rate": 1.8485273149019655e-06, |
| "loss": 0.0278, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.437125748502994, |
| "grad_norm": 0.7767045065124477, |
| "learning_rate": 1.8303062865530407e-06, |
| "loss": 0.026, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.4401197604790419, |
| "grad_norm": 0.7623917999011538, |
| "learning_rate": 1.8121553655165058e-06, |
| "loss": 0.0286, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.4431137724550898, |
| "grad_norm": 0.717065207465707, |
| "learning_rate": 1.7940749532545832e-06, |
| "loss": 0.0293, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.4461077844311379, |
| "grad_norm": 0.8990399187307896, |
| "learning_rate": 1.7760654496699876e-06, |
| "loss": 0.0314, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.4491017964071857, |
| "grad_norm": 0.6593536905788724, |
| "learning_rate": 1.7581272530970666e-06, |
| "loss": 0.0288, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.4520958083832336, |
| "grad_norm": 0.7110876686323473, |
| "learning_rate": 1.7402607602930106e-06, |
| "loss": 0.0315, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.4550898203592815, |
| "grad_norm": 0.855571531618953, |
| "learning_rate": 1.7224663664290537e-06, |
| "loss": 0.0328, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.4580838323353293, |
| "grad_norm": 0.822138534764874, |
| "learning_rate": 1.7047444650817518e-06, |
| "loss": 0.029, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.4610778443113772, |
| "grad_norm": 0.7985007308245561, |
| "learning_rate": 1.6870954482242707e-06, |
| "loss": 0.0278, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.464071856287425, |
| "grad_norm": 0.6529053912134282, |
| "learning_rate": 1.669519706217711e-06, |
| "loss": 0.0231, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.467065868263473, |
| "grad_norm": 1.0775658669244046, |
| "learning_rate": 1.652017627802487e-06, |
| "loss": 0.0404, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.4700598802395208, |
| "grad_norm": 0.7566624267062195, |
| "learning_rate": 1.6345896000897122e-06, |
| "loss": 0.0264, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.473053892215569, |
| "grad_norm": 0.889393201746035, |
| "learning_rate": 1.6172360085526567e-06, |
| "loss": 0.0364, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.4760479041916168, |
| "grad_norm": 0.7896130226159147, |
| "learning_rate": 1.5999572370182016e-06, |
| "loss": 0.0318, |
| "step": 493 |
| }, |
| { |
| "epoch": 1.4790419161676647, |
| "grad_norm": 0.7833912705448801, |
| "learning_rate": 1.5827536676583643e-06, |
| "loss": 0.0321, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.4820359281437125, |
| "grad_norm": 0.7765840124546369, |
| "learning_rate": 1.5656256809818343e-06, |
| "loss": 0.0286, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.4850299401197604, |
| "grad_norm": 0.7209927182279341, |
| "learning_rate": 1.54857365582557e-06, |
| "loss": 0.0282, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.4880239520958085, |
| "grad_norm": 0.9815242098107917, |
| "learning_rate": 1.5315979693464039e-06, |
| "loss": 0.0462, |
| "step": 497 |
| }, |
| { |
| "epoch": 1.4910179640718564, |
| "grad_norm": 0.7869024478324081, |
| "learning_rate": 1.5146989970127158e-06, |
| "loss": 0.0263, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.4940119760479043, |
| "grad_norm": 0.8160274706586781, |
| "learning_rate": 1.4978771125961177e-06, |
| "loss": 0.0297, |
| "step": 499 |
| }, |
| { |
| "epoch": 1.4970059880239521, |
| "grad_norm": 0.7805482932790326, |
| "learning_rate": 1.4811326881631937e-06, |
| "loss": 0.0333, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.9558030047904206, |
| "learning_rate": 1.4644660940672628e-06, |
| "loss": 0.0363, |
| "step": 501 |
| }, |
| { |
| "epoch": 1.5029940119760479, |
| "grad_norm": 1.0100842926631917, |
| "learning_rate": 1.4478776989401949e-06, |
| "loss": 0.0305, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.5059880239520957, |
| "grad_norm": 0.8435445692139016, |
| "learning_rate": 1.4313678696842559e-06, |
| "loss": 0.0316, |
| "step": 503 |
| }, |
| { |
| "epoch": 1.5089820359281436, |
| "grad_norm": 0.7106837250190794, |
| "learning_rate": 1.4149369714639856e-06, |
| "loss": 0.0296, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.5119760479041915, |
| "grad_norm": 0.7739948521366974, |
| "learning_rate": 1.3985853676981316e-06, |
| "loss": 0.0279, |
| "step": 505 |
| }, |
| { |
| "epoch": 1.5149700598802394, |
| "grad_norm": 0.8117653947864281, |
| "learning_rate": 1.3823134200516043e-06, |
| "loss": 0.0309, |
| "step": 506 |
| }, |
| { |
| "epoch": 1.5179640718562875, |
| "grad_norm": 0.9087591195077263, |
| "learning_rate": 1.366121488427481e-06, |
| "loss": 0.0384, |
| "step": 507 |
| }, |
| { |
| "epoch": 1.5209580838323353, |
| "grad_norm": 0.7686143275900593, |
| "learning_rate": 1.3500099309590397e-06, |
| "loss": 0.0294, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.5239520958083832, |
| "grad_norm": 0.8931968006552831, |
| "learning_rate": 1.3339791040018479e-06, |
| "loss": 0.0308, |
| "step": 509 |
| }, |
| { |
| "epoch": 1.5269461077844313, |
| "grad_norm": 0.8868404871926984, |
| "learning_rate": 1.3180293621258694e-06, |
| "loss": 0.0353, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.5299401197604792, |
| "grad_norm": 0.758248005845863, |
| "learning_rate": 1.3021610581076316e-06, |
| "loss": 0.0297, |
| "step": 511 |
| }, |
| { |
| "epoch": 1.532934131736527, |
| "grad_norm": 0.8672445860829112, |
| "learning_rate": 1.2863745429224145e-06, |
| "loss": 0.0288, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.535928143712575, |
| "grad_norm": 0.7675559253844609, |
| "learning_rate": 1.270670165736499e-06, |
| "loss": 0.0322, |
| "step": 513 |
| }, |
| { |
| "epoch": 1.5389221556886228, |
| "grad_norm": 0.9548904782356388, |
| "learning_rate": 1.2550482738994284e-06, |
| "loss": 0.0402, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.5419161676646707, |
| "grad_norm": 0.8462083353048224, |
| "learning_rate": 1.239509212936343e-06, |
| "loss": 0.0404, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.5449101796407185, |
| "grad_norm": 0.7254937332397977, |
| "learning_rate": 1.22405332654032e-06, |
| "loss": 0.0283, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.5479041916167664, |
| "grad_norm": 0.8570120599090103, |
| "learning_rate": 1.2086809565647877e-06, |
| "loss": 0.0341, |
| "step": 517 |
| }, |
| { |
| "epoch": 1.5508982035928143, |
| "grad_norm": 0.7360277266798015, |
| "learning_rate": 1.1933924430159571e-06, |
| "loss": 0.0292, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.5538922155688621, |
| "grad_norm": 0.8117403429158275, |
| "learning_rate": 1.1781881240452958e-06, |
| "loss": 0.0287, |
| "step": 519 |
| }, |
| { |
| "epoch": 1.55688622754491, |
| "grad_norm": 0.7776108690426635, |
| "learning_rate": 1.1630683359420653e-06, |
| "loss": 0.0336, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.5598802395209581, |
| "grad_norm": 0.7674741930868076, |
| "learning_rate": 1.1480334131258626e-06, |
| "loss": 0.0322, |
| "step": 521 |
| }, |
| { |
| "epoch": 1.562874251497006, |
| "grad_norm": 0.9943192478363255, |
| "learning_rate": 1.1330836881392405e-06, |
| "loss": 0.0386, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.5658682634730539, |
| "grad_norm": 0.7389979799147285, |
| "learning_rate": 1.11821949164034e-06, |
| "loss": 0.0268, |
| "step": 523 |
| }, |
| { |
| "epoch": 1.568862275449102, |
| "grad_norm": 0.6633213086550153, |
| "learning_rate": 1.103441152395588e-06, |
| "loss": 0.0237, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.5718562874251498, |
| "grad_norm": 0.7593865064786751, |
| "learning_rate": 1.088748997272414e-06, |
| "loss": 0.0289, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.5748502994011977, |
| "grad_norm": 0.9226523367651599, |
| "learning_rate": 1.0741433512320316e-06, |
| "loss": 0.034, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.5778443113772456, |
| "grad_norm": 0.73814752667089, |
| "learning_rate": 1.0596245373222424e-06, |
| "loss": 0.0248, |
| "step": 527 |
| }, |
| { |
| "epoch": 1.5808383233532934, |
| "grad_norm": 0.8591782256330072, |
| "learning_rate": 1.045192876670298e-06, |
| "loss": 0.036, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.5838323353293413, |
| "grad_norm": 0.8078932915516085, |
| "learning_rate": 1.0308486884757868e-06, |
| "loss": 0.0288, |
| "step": 529 |
| }, |
| { |
| "epoch": 1.5868263473053892, |
| "grad_norm": 0.8422027954576564, |
| "learning_rate": 1.0165922900035886e-06, |
| "loss": 0.032, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.589820359281437, |
| "grad_norm": 0.6402676123892606, |
| "learning_rate": 1.0024239965768417e-06, |
| "loss": 0.0284, |
| "step": 531 |
| }, |
| { |
| "epoch": 1.592814371257485, |
| "grad_norm": 0.6932599188526581, |
| "learning_rate": 9.883441215699824e-07, |
| "loss": 0.0225, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.5958083832335328, |
| "grad_norm": 0.8265428761855518, |
| "learning_rate": 9.74352976401805e-07, |
| "loss": 0.0288, |
| "step": 533 |
| }, |
| { |
| "epoch": 1.5988023952095807, |
| "grad_norm": 0.8598560545298901, |
| "learning_rate": 9.604508705285765e-07, |
| "loss": 0.0338, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.6017964071856288, |
| "grad_norm": 0.7342467342475081, |
| "learning_rate": 9.466381114371942e-07, |
| "loss": 0.03, |
| "step": 535 |
| }, |
| { |
| "epoch": 1.6047904191616766, |
| "grad_norm": 0.8819889873555795, |
| "learning_rate": 9.329150046383773e-07, |
| "loss": 0.037, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.6077844311377245, |
| "grad_norm": 0.6933969738630927, |
| "learning_rate": 9.192818536599213e-07, |
| "loss": 0.0266, |
| "step": 537 |
| }, |
| { |
| "epoch": 1.6107784431137726, |
| "grad_norm": 0.7635103122063983, |
| "learning_rate": 9.057389600399719e-07, |
| "loss": 0.032, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.6137724550898205, |
| "grad_norm": 0.8093499940109913, |
| "learning_rate": 8.922866233203681e-07, |
| "loss": 0.0373, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.6167664670658684, |
| "grad_norm": 0.670879961016856, |
| "learning_rate": 8.789251410400024e-07, |
| "loss": 0.0275, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.6197604790419162, |
| "grad_norm": 0.7502571831965572, |
| "learning_rate": 8.65654808728259e-07, |
| "loss": 0.0311, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.622754491017964, |
| "grad_norm": 0.7501339998587506, |
| "learning_rate": 8.524759198984567e-07, |
| "loss": 0.0265, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.625748502994012, |
| "grad_norm": 0.9965584980768845, |
| "learning_rate": 8.393887660413719e-07, |
| "loss": 0.0382, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.6287425149700598, |
| "grad_norm": 1.0608902045933304, |
| "learning_rate": 8.263936366187825e-07, |
| "loss": 0.0457, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.6317365269461077, |
| "grad_norm": 0.8491007047520747, |
| "learning_rate": 8.134908190570723e-07, |
| "loss": 0.0398, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.6347305389221556, |
| "grad_norm": 0.8087918069570493, |
| "learning_rate": 8.006805987408705e-07, |
| "loss": 0.0317, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.6377245508982035, |
| "grad_norm": 0.7380314097200338, |
| "learning_rate": 7.879632590067354e-07, |
| "loss": 0.0315, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.6407185628742516, |
| "grad_norm": 0.7152291509501483, |
| "learning_rate": 7.753390811368972e-07, |
| "loss": 0.0237, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.6437125748502994, |
| "grad_norm": 0.9032206741121168, |
| "learning_rate": 7.628083443530287e-07, |
| "loss": 0.0341, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.6467065868263473, |
| "grad_norm": 0.791470160856038, |
| "learning_rate": 7.503713258100726e-07, |
| "loss": 0.0295, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.6497005988023952, |
| "grad_norm": 0.8982642421455997, |
| "learning_rate": 7.380283005901084e-07, |
| "loss": 0.0405, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.6526946107784433, |
| "grad_norm": 0.7479241952338301, |
| "learning_rate": 7.257795416962754e-07, |
| "loss": 0.0289, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.6556886227544911, |
| "grad_norm": 0.5945504219676864, |
| "learning_rate": 7.136253200467231e-07, |
| "loss": 0.0177, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.658682634730539, |
| "grad_norm": 0.7374110072286232, |
| "learning_rate": 7.015659044686307e-07, |
| "loss": 0.0244, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.6616766467065869, |
| "grad_norm": 0.7182229108008051, |
| "learning_rate": 6.896015616922535e-07, |
| "loss": 0.0271, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.6646706586826348, |
| "grad_norm": 0.8175851271732705, |
| "learning_rate": 6.777325563450282e-07, |
| "loss": 0.0325, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.6676646706586826, |
| "grad_norm": 0.8060955271645127, |
| "learning_rate": 6.659591509457125e-07, |
| "loss": 0.0309, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.6706586826347305, |
| "grad_norm": 0.749762273978849, |
| "learning_rate": 6.542816058985896e-07, |
| "loss": 0.0263, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.6736526946107784, |
| "grad_norm": 0.8326712833404715, |
| "learning_rate": 6.427001794876974e-07, |
| "loss": 0.0309, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.6766467065868262, |
| "grad_norm": 0.7299755313881022, |
| "learning_rate": 6.312151278711237e-07, |
| "loss": 0.0278, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.6796407185628741, |
| "grad_norm": 0.8931889418235481, |
| "learning_rate": 6.198267050753387e-07, |
| "loss": 0.0331, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.6826347305389222, |
| "grad_norm": 0.7047277203420241, |
| "learning_rate": 6.085351629895736e-07, |
| "loss": 0.0268, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.68562874251497, |
| "grad_norm": 0.7998674663222014, |
| "learning_rate": 5.973407513602514e-07, |
| "loss": 0.0286, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.688622754491018, |
| "grad_norm": 0.9024059485355096, |
| "learning_rate": 5.862437177854629e-07, |
| "loss": 0.0367, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.6916167664670658, |
| "grad_norm": 0.8339021894551686, |
| "learning_rate": 5.752443077094927e-07, |
| "loss": 0.0339, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.694610778443114, |
| "grad_norm": 0.6966678288107665, |
| "learning_rate": 5.643427644173838e-07, |
| "loss": 0.0267, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.6976047904191618, |
| "grad_norm": 0.877469773656579, |
| "learning_rate": 5.535393290295643e-07, |
| "loss": 0.0321, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.7005988023952097, |
| "grad_norm": 0.7395916979542717, |
| "learning_rate": 5.428342404965076e-07, |
| "loss": 0.0269, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.7035928143712575, |
| "grad_norm": 0.6522625029162095, |
| "learning_rate": 5.322277355934557e-07, |
| "loss": 0.0233, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.7065868263473054, |
| "grad_norm": 0.673364217725514, |
| "learning_rate": 5.217200489151714e-07, |
| "loss": 0.022, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.7095808383233533, |
| "grad_norm": 0.6746275844631606, |
| "learning_rate": 5.113114128707592e-07, |
| "loss": 0.0249, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.7125748502994012, |
| "grad_norm": 0.8565551734630578, |
| "learning_rate": 5.010020576785174e-07, |
| "loss": 0.0363, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.715568862275449, |
| "grad_norm": 0.8164502086251159, |
| "learning_rate": 4.907922113608532e-07, |
| "loss": 0.0272, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.718562874251497, |
| "grad_norm": 0.8938217759994574, |
| "learning_rate": 4.806820997392325e-07, |
| "loss": 0.0342, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.7215568862275448, |
| "grad_norm": 0.6524794927709131, |
| "learning_rate": 4.7067194642919036e-07, |
| "loss": 0.0209, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.7245508982035929, |
| "grad_norm": 0.7259599873949754, |
| "learning_rate": 4.607619728353818e-07, |
| "loss": 0.026, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.7275449101796407, |
| "grad_norm": 0.8063411056617391, |
| "learning_rate": 4.50952398146689e-07, |
| "loss": 0.0306, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.7305389221556886, |
| "grad_norm": 0.7265239467576244, |
| "learning_rate": 4.4124343933136525e-07, |
| "loss": 0.0256, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.7335329341317365, |
| "grad_norm": 0.9376866641359151, |
| "learning_rate": 4.3163531113224466e-07, |
| "loss": 0.0288, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.7365269461077846, |
| "grad_norm": 0.9201568401188499, |
| "learning_rate": 4.221282260619891e-07, |
| "loss": 0.0346, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.7395209580838324, |
| "grad_norm": 0.6363801799371435, |
| "learning_rate": 4.127223943983849e-07, |
| "loss": 0.0267, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.7425149700598803, |
| "grad_norm": 0.764412207543806, |
| "learning_rate": 4.03418024179697e-07, |
| "loss": 0.0256, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.7455089820359282, |
| "grad_norm": 0.9655544882389034, |
| "learning_rate": 3.9421532120006544e-07, |
| "loss": 0.0373, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.748502994011976, |
| "grad_norm": 0.6673030026053577, |
| "learning_rate": 3.851144890049535e-07, |
| "loss": 0.0262, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.751497005988024, |
| "grad_norm": 0.6839874633904656, |
| "learning_rate": 3.761157288866418e-07, |
| "loss": 0.0252, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.7544910179640718, |
| "grad_norm": 0.7786676439718657, |
| "learning_rate": 3.672192398797858e-07, |
| "loss": 0.0308, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.7574850299401197, |
| "grad_norm": 0.7584560601242782, |
| "learning_rate": 3.58425218757002e-07, |
| "loss": 0.024, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.7604790419161676, |
| "grad_norm": 0.7510318107946398, |
| "learning_rate": 3.497338600245254e-07, |
| "loss": 0.0222, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.7634730538922154, |
| "grad_norm": 0.7250295300427998, |
| "learning_rate": 3.4114535591790233e-07, |
| "loss": 0.0251, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.7664670658682635, |
| "grad_norm": 0.8390580850015997, |
| "learning_rate": 3.326598963977395e-07, |
| "loss": 0.0239, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.7694610778443114, |
| "grad_norm": 0.6886547360013243, |
| "learning_rate": 3.242776691455013e-07, |
| "loss": 0.0242, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.7724550898203593, |
| "grad_norm": 0.978824922086262, |
| "learning_rate": 3.159988595593616e-07, |
| "loss": 0.0271, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.7754491017964071, |
| "grad_norm": 0.5475118355720677, |
| "learning_rate": 3.078236507501015e-07, |
| "loss": 0.0233, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.7784431137724552, |
| "grad_norm": 0.9053987470674856, |
| "learning_rate": 2.9975222353705757e-07, |
| "loss": 0.0411, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.781437125748503, |
| "grad_norm": 0.8040444816314111, |
| "learning_rate": 2.917847564441256e-07, |
| "loss": 0.0272, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.784431137724551, |
| "grad_norm": 0.7560893065684454, |
| "learning_rate": 2.839214256958106e-07, |
| "loss": 0.0286, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.7874251497005988, |
| "grad_norm": 0.6813116340105736, |
| "learning_rate": 2.7616240521332884e-07, |
| "loss": 0.0279, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.7904191616766467, |
| "grad_norm": 0.7818721757749574, |
| "learning_rate": 2.6850786661076047e-07, |
| "loss": 0.028, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.7934131736526946, |
| "grad_norm": 0.7833483284532683, |
| "learning_rate": 2.6095797919125533e-07, |
| "loss": 0.0301, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.7964071856287425, |
| "grad_norm": 0.8074948988559361, |
| "learning_rate": 2.5351290994328703e-07, |
| "loss": 0.032, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.7964071856287425, |
| "eval_loss": 0.11569614708423615, |
| "eval_runtime": 8.3105, |
| "eval_samples_per_second": 6.498, |
| "eval_steps_per_second": 1.685, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.7994011976047903, |
| "grad_norm": 0.7627295967716242, |
| "learning_rate": 2.4617282353696093e-07, |
| "loss": 0.0316, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.8023952095808382, |
| "grad_norm": 1.0351380615872117, |
| "learning_rate": 2.3893788232036807e-07, |
| "loss": 0.0414, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.805389221556886, |
| "grad_norm": 0.8254307879341827, |
| "learning_rate": 2.318082463160032e-07, |
| "loss": 0.0357, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.8083832335329342, |
| "grad_norm": 0.621299796743885, |
| "learning_rate": 2.2478407321721295e-07, |
| "loss": 0.0217, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.811377245508982, |
| "grad_norm": 0.7997985341671922, |
| "learning_rate": 2.1786551838471892e-07, |
| "loss": 0.0302, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.81437125748503, |
| "grad_norm": 0.7000903319246224, |
| "learning_rate": 2.1105273484317402e-07, |
| "loss": 0.0265, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.8173652694610778, |
| "grad_norm": 0.6705282788765942, |
| "learning_rate": 2.043458732777831e-07, |
| "loss": 0.0273, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.8203592814371259, |
| "grad_norm": 0.9406181937130907, |
| "learning_rate": 1.9774508203096843e-07, |
| "loss": 0.0363, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.8233532934131738, |
| "grad_norm": 0.6881737824493238, |
| "learning_rate": 1.9125050709908388e-07, |
| "loss": 0.0212, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.8263473053892216, |
| "grad_norm": 0.7261710446004571, |
| "learning_rate": 1.8486229212919482e-07, |
| "loss": 0.0271, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.8293413173652695, |
| "grad_norm": 0.6753904490937951, |
| "learning_rate": 1.7858057841589281e-07, |
| "loss": 0.0246, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.8323353293413174, |
| "grad_norm": 0.800972682165409, |
| "learning_rate": 1.7240550489817652e-07, |
| "loss": 0.0273, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.8353293413173652, |
| "grad_norm": 0.8856638741023288, |
| "learning_rate": 1.66337208156373e-07, |
| "loss": 0.0257, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.8383233532934131, |
| "grad_norm": 0.848123138869985, |
| "learning_rate": 1.6037582240912175e-07, |
| "loss": 0.0283, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.841317365269461, |
| "grad_norm": 0.8153695174447103, |
| "learning_rate": 1.5452147951040165e-07, |
| "loss": 0.0321, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.8443113772455089, |
| "grad_norm": 0.8507387540239293, |
| "learning_rate": 1.4877430894662037e-07, |
| "loss": 0.0319, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.8473053892215567, |
| "grad_norm": 0.7469927587716587, |
| "learning_rate": 1.4313443783374405e-07, |
| "loss": 0.0257, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.8502994011976048, |
| "grad_norm": 0.8319162549138669, |
| "learning_rate": 1.3760199091449045e-07, |
| "loss": 0.0286, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.8532934131736527, |
| "grad_norm": 0.7497255249934415, |
| "learning_rate": 1.3217709055556638e-07, |
| "loss": 0.0293, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.8562874251497006, |
| "grad_norm": 0.7557654251140918, |
| "learning_rate": 1.268598567449647e-07, |
| "loss": 0.0294, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.8592814371257484, |
| "grad_norm": 0.7737192110553284, |
| "learning_rate": 1.2165040708930763e-07, |
| "loss": 0.0297, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.8622754491017965, |
| "grad_norm": 0.5870476070829563, |
| "learning_rate": 1.1654885681124661e-07, |
| "loss": 0.0236, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.8652694610778444, |
| "grad_norm": 0.7595469833939291, |
| "learning_rate": 1.1155531874691372e-07, |
| "loss": 0.0263, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.8682634730538923, |
| "grad_norm": 0.7912993151046831, |
| "learning_rate": 1.0666990334342708e-07, |
| "loss": 0.0258, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.8712574850299402, |
| "grad_norm": 0.7059884315250772, |
| "learning_rate": 1.0189271865644445e-07, |
| "loss": 0.0232, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.874251497005988, |
| "grad_norm": 0.7830213909548226, |
| "learning_rate": 9.722387034777847e-08, |
| "loss": 0.0257, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.877245508982036, |
| "grad_norm": 0.8042684494196181, |
| "learning_rate": 9.266346168305518e-08, |
| "loss": 0.0327, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.8802395209580838, |
| "grad_norm": 0.7598214971995989, |
| "learning_rate": 8.821159352943142e-08, |
| "loss": 0.0247, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.8832335329341316, |
| "grad_norm": 0.6847376918437824, |
| "learning_rate": 8.38683643533661e-08, |
| "loss": 0.0228, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.8862275449101795, |
| "grad_norm": 0.7885075330216371, |
| "learning_rate": 7.963387021843683e-08, |
| "loss": 0.0225, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.8892215568862274, |
| "grad_norm": 0.8403888633325877, |
| "learning_rate": 7.550820478322285e-08, |
| "loss": 0.03, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.8922155688622755, |
| "grad_norm": 0.7839221265900145, |
| "learning_rate": 7.149145929922607e-08, |
| "loss": 0.0346, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.8952095808383234, |
| "grad_norm": 0.8825142678915027, |
| "learning_rate": 6.758372260885714e-08, |
| "loss": 0.036, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.8982035928143712, |
| "grad_norm": 0.8820324482632153, |
| "learning_rate": 6.378508114346982e-08, |
| "loss": 0.0361, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.9011976047904193, |
| "grad_norm": 0.8295764701865402, |
| "learning_rate": 6.009561892144744e-08, |
| "loss": 0.0296, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.9041916167664672, |
| "grad_norm": 0.7587867160827685, |
| "learning_rate": 5.651541754634726e-08, |
| "loss": 0.0243, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.907185628742515, |
| "grad_norm": 1.1572414445874655, |
| "learning_rate": 5.304455620509297e-08, |
| "loss": 0.0407, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.910179640718563, |
| "grad_norm": 0.8213888631978954, |
| "learning_rate": 4.968311166622553e-08, |
| "loss": 0.0301, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.9131736526946108, |
| "grad_norm": 0.7684142991073891, |
| "learning_rate": 4.643115827820399e-08, |
| "loss": 0.028, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.9161676646706587, |
| "grad_norm": 0.7261099591746644, |
| "learning_rate": 4.328876796776071e-08, |
| "loss": 0.0253, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.9191616766467066, |
| "grad_norm": 0.8085083822365345, |
| "learning_rate": 4.0256010238310936e-08, |
| "loss": 0.0305, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.9221556886227544, |
| "grad_norm": 0.7885260127800128, |
| "learning_rate": 3.733295216841626e-08, |
| "loss": 0.0293, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.9251497005988023, |
| "grad_norm": 0.9966837288767681, |
| "learning_rate": 3.451965841029914e-08, |
| "loss": 0.0409, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.9281437125748502, |
| "grad_norm": 0.6527694593103184, |
| "learning_rate": 3.181619118841517e-08, |
| "loss": 0.0208, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.931137724550898, |
| "grad_norm": 0.6434419204390688, |
| "learning_rate": 2.9222610298074717e-08, |
| "loss": 0.0205, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.9341317365269461, |
| "grad_norm": 0.725497894132429, |
| "learning_rate": 2.673897310412288e-08, |
| "loss": 0.0259, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.937125748502994, |
| "grad_norm": 0.6852583371487462, |
| "learning_rate": 2.4365334539667717e-08, |
| "loss": 0.0244, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.9401197604790419, |
| "grad_norm": 0.8260510760174733, |
| "learning_rate": 2.210174710486679e-08, |
| "loss": 0.0353, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.94311377245509, |
| "grad_norm": 0.5411743202112663, |
| "learning_rate": 1.99482608657664e-08, |
| "loss": 0.0201, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.9461077844311379, |
| "grad_norm": 0.6786876049644537, |
| "learning_rate": 1.7904923453193056e-08, |
| "loss": 0.0235, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.9491017964071857, |
| "grad_norm": 0.791955901981055, |
| "learning_rate": 1.5971780061701524e-08, |
| "loss": 0.0246, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.9520958083832336, |
| "grad_norm": 0.6738215438769756, |
| "learning_rate": 1.4148873448573408e-08, |
| "loss": 0.0244, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.9550898203592815, |
| "grad_norm": 0.7286329828267174, |
| "learning_rate": 1.2436243932872349e-08, |
| "loss": 0.0262, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.9580838323353293, |
| "grad_norm": 0.7316191284999307, |
| "learning_rate": 1.0833929394552523e-08, |
| "loss": 0.0226, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.9610778443113772, |
| "grad_norm": 0.7336140222712861, |
| "learning_rate": 9.341965273621522e-09, |
| "loss": 0.0247, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.964071856287425, |
| "grad_norm": 1.0225380383840057, |
| "learning_rate": 7.96038456935322e-09, |
| "loss": 0.0316, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.967065868263473, |
| "grad_norm": 0.9926980469443579, |
| "learning_rate": 6.6892178395611125e-09, |
| "loss": 0.0371, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.9700598802395208, |
| "grad_norm": 0.8122232095047586, |
| "learning_rate": 5.528493199922769e-09, |
| "loss": 0.0296, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.9730538922155687, |
| "grad_norm": 0.6964096373843873, |
| "learning_rate": 4.478236323355312e-09, |
| "loss": 0.0269, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.9760479041916168, |
| "grad_norm": 0.6747332635033737, |
| "learning_rate": 3.538470439448105e-09, |
| "loss": 0.0235, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.9790419161676647, |
| "grad_norm": 0.9678315204729531, |
| "learning_rate": 2.709216333952602e-09, |
| "loss": 0.0336, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.9820359281437125, |
| "grad_norm": 0.7122196382626427, |
| "learning_rate": 1.9904923483171632e-09, |
| "loss": 0.0276, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.9850299401197606, |
| "grad_norm": 0.9035083163114916, |
| "learning_rate": 1.3823143792851545e-09, |
| "loss": 0.033, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.9880239520958085, |
| "grad_norm": 1.0076961376070062, |
| "learning_rate": 8.846958785418969e-10, |
| "loss": 0.0416, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.9910179640718564, |
| "grad_norm": 0.6443600818514333, |
| "learning_rate": 4.97647852417682e-10, |
| "loss": 0.0225, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.9940119760479043, |
| "grad_norm": 0.7873480944429055, |
| "learning_rate": 2.2117886164407797e-10, |
| "loss": 0.0264, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.9970059880239521, |
| "grad_norm": 0.7450213849077284, |
| "learning_rate": 5.529502116519148e-11, |
| "loss": 0.0285, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.4334108045966172, |
| "learning_rate": 0.0, |
| "loss": 0.0101, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.0, |
| "step": 668, |
| "total_flos": 16797260906496.0, |
| "train_loss": 0.06999007535507222, |
| "train_runtime": 1799.694, |
| "train_samples_per_second": 5.922, |
| "train_steps_per_second": 0.371 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 668, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 2000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 16797260906496.0, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |