| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.2196126277260944, |
| "eval_steps": 500, |
| "global_step": 2000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0, |
| "eval_loss": 0.3103026747703552, |
| "eval_runtime": 148.5907, |
| "eval_samples_per_second": 7.201, |
| "eval_steps_per_second": 0.229, |
| "step": 0 |
| }, |
| { |
| "epoch": 0.0006100350770169285, |
| "grad_norm": 0.8454470981023452, |
| "learning_rate": 0.0, |
| "loss": 0.3287, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.001220070154033857, |
| "grad_norm": 0.7768184622203883, |
| "learning_rate": 3.0487804878048784e-08, |
| "loss": 0.3074, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0018301052310507853, |
| "grad_norm": 0.8507240324798782, |
| "learning_rate": 6.097560975609757e-08, |
| "loss": 0.3157, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.002440140308067714, |
| "grad_norm": 0.7931819707357988, |
| "learning_rate": 9.146341463414634e-08, |
| "loss": 0.3158, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0030501753850846426, |
| "grad_norm": 0.7827519593120217, |
| "learning_rate": 1.2195121951219514e-07, |
| "loss": 0.3054, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0036602104621015707, |
| "grad_norm": 0.924981881710818, |
| "learning_rate": 1.5243902439024392e-07, |
| "loss": 0.319, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0042702455391185, |
| "grad_norm": 0.8094982837416114, |
| "learning_rate": 1.8292682926829268e-07, |
| "loss": 0.2729, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.004880280616135428, |
| "grad_norm": 0.7880554417981912, |
| "learning_rate": 2.134146341463415e-07, |
| "loss": 0.3025, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.005490315693152357, |
| "grad_norm": 0.7959447687740115, |
| "learning_rate": 2.439024390243903e-07, |
| "loss": 0.3053, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.006100350770169285, |
| "grad_norm": 0.82247619574212, |
| "learning_rate": 2.7439024390243906e-07, |
| "loss": 0.3097, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.006710385847186213, |
| "grad_norm": 0.8010327654330195, |
| "learning_rate": 3.0487804878048784e-07, |
| "loss": 0.3284, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.007320420924203141, |
| "grad_norm": 0.867259966178027, |
| "learning_rate": 3.3536585365853663e-07, |
| "loss": 0.3263, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.00793045600122007, |
| "grad_norm": 0.6858690138397889, |
| "learning_rate": 3.6585365853658536e-07, |
| "loss": 0.3062, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.008540491078237, |
| "grad_norm": 0.6968955854258266, |
| "learning_rate": 3.963414634146342e-07, |
| "loss": 0.3118, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.009150526155253927, |
| "grad_norm": 0.7149293379600384, |
| "learning_rate": 4.26829268292683e-07, |
| "loss": 0.2998, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.009760561232270856, |
| "grad_norm": 0.8384371568701614, |
| "learning_rate": 4.573170731707317e-07, |
| "loss": 0.2989, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.010370596309287784, |
| "grad_norm": 0.7959356057845837, |
| "learning_rate": 4.878048780487805e-07, |
| "loss": 0.3226, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.010980631386304713, |
| "grad_norm": 0.5860521260046243, |
| "learning_rate": 5.182926829268293e-07, |
| "loss": 0.3293, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.011590666463321641, |
| "grad_norm": 0.42435828625242183, |
| "learning_rate": 5.487804878048781e-07, |
| "loss": 0.2976, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.01220070154033857, |
| "grad_norm": 0.4723062068417477, |
| "learning_rate": 5.79268292682927e-07, |
| "loss": 0.2906, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.012810736617355498, |
| "grad_norm": 0.5311333611487327, |
| "learning_rate": 6.097560975609757e-07, |
| "loss": 0.3175, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.013420771694372426, |
| "grad_norm": 0.43195173965189027, |
| "learning_rate": 6.402439024390244e-07, |
| "loss": 0.3326, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.014030806771389355, |
| "grad_norm": 0.5039374654423954, |
| "learning_rate": 6.707317073170733e-07, |
| "loss": 0.3243, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.014640841848406283, |
| "grad_norm": 0.48160299981914606, |
| "learning_rate": 7.012195121951221e-07, |
| "loss": 0.3107, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.015250876925423212, |
| "grad_norm": 0.4076510164467301, |
| "learning_rate": 7.317073170731707e-07, |
| "loss": 0.3197, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.01586091200244014, |
| "grad_norm": 0.3353897704259775, |
| "learning_rate": 7.621951219512196e-07, |
| "loss": 0.3119, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.01647094707945707, |
| "grad_norm": 0.3150496553683514, |
| "learning_rate": 7.926829268292684e-07, |
| "loss": 0.3008, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.017080982156474, |
| "grad_norm": 0.32105723872932035, |
| "learning_rate": 8.231707317073172e-07, |
| "loss": 0.2905, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.017691017233490924, |
| "grad_norm": 0.3280306420088101, |
| "learning_rate": 8.53658536585366e-07, |
| "loss": 0.2707, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.018301052310507854, |
| "grad_norm": 0.30340527994818084, |
| "learning_rate": 8.841463414634147e-07, |
| "loss": 0.311, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.018911087387524783, |
| "grad_norm": 0.32281828347533853, |
| "learning_rate": 9.146341463414634e-07, |
| "loss": 0.3284, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.019521122464541713, |
| "grad_norm": 0.28874292519953626, |
| "learning_rate": 9.451219512195123e-07, |
| "loss": 0.29, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.02013115754155864, |
| "grad_norm": 0.3027310057466991, |
| "learning_rate": 9.75609756097561e-07, |
| "loss": 0.314, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.020741192618575568, |
| "grad_norm": 0.3008812932109969, |
| "learning_rate": 1.0060975609756098e-06, |
| "loss": 0.3144, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.021351227695592497, |
| "grad_norm": 0.27169807758622333, |
| "learning_rate": 1.0365853658536586e-06, |
| "loss": 0.2963, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.021961262772609427, |
| "grad_norm": 0.2695339964388015, |
| "learning_rate": 1.0670731707317073e-06, |
| "loss": 0.2893, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.022571297849626352, |
| "grad_norm": 0.2843145950012066, |
| "learning_rate": 1.0975609756097562e-06, |
| "loss": 0.323, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.023181332926643282, |
| "grad_norm": 0.29602446856008774, |
| "learning_rate": 1.128048780487805e-06, |
| "loss": 0.3209, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.02379136800366021, |
| "grad_norm": 0.3423946407175081, |
| "learning_rate": 1.158536585365854e-06, |
| "loss": 0.3061, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.02440140308067714, |
| "grad_norm": 0.3612641386449874, |
| "learning_rate": 1.1890243902439024e-06, |
| "loss": 0.31, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.025011438157694067, |
| "grad_norm": 0.3325638984346252, |
| "learning_rate": 1.2195121951219514e-06, |
| "loss": 0.3158, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.025621473234710996, |
| "grad_norm": 0.31401563220686574, |
| "learning_rate": 1.25e-06, |
| "loss": 0.2922, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.026231508311727925, |
| "grad_norm": 0.3170156202095663, |
| "learning_rate": 1.2804878048780488e-06, |
| "loss": 0.3141, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.02684154338874485, |
| "grad_norm": 0.32428209772408506, |
| "learning_rate": 1.3109756097560978e-06, |
| "loss": 0.2877, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.02745157846576178, |
| "grad_norm": 0.2937477314360728, |
| "learning_rate": 1.3414634146341465e-06, |
| "loss": 0.2825, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.02806161354277871, |
| "grad_norm": 0.2958373552353005, |
| "learning_rate": 1.3719512195121952e-06, |
| "loss": 0.3008, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.02867164861979564, |
| "grad_norm": 0.2880444673791773, |
| "learning_rate": 1.4024390243902442e-06, |
| "loss": 0.3016, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.029281683696812565, |
| "grad_norm": 0.2789112072045315, |
| "learning_rate": 1.4329268292682927e-06, |
| "loss": 0.3025, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.029891718773829495, |
| "grad_norm": 0.25724915007561794, |
| "learning_rate": 1.4634146341463414e-06, |
| "loss": 0.2747, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.030501753850846424, |
| "grad_norm": 0.2635046087062574, |
| "learning_rate": 1.4939024390243904e-06, |
| "loss": 0.2984, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.031111788927863353, |
| "grad_norm": 0.26015662456743477, |
| "learning_rate": 1.5243902439024391e-06, |
| "loss": 0.3243, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.03172182400488028, |
| "grad_norm": 0.24509502760724197, |
| "learning_rate": 1.5548780487804878e-06, |
| "loss": 0.321, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.03233185908189721, |
| "grad_norm": 0.2908236246505609, |
| "learning_rate": 1.5853658536585368e-06, |
| "loss": 0.2937, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.03294189415891414, |
| "grad_norm": 0.2718558063137255, |
| "learning_rate": 1.6158536585365855e-06, |
| "loss": 0.3195, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.03355192923593107, |
| "grad_norm": 0.22727302568286986, |
| "learning_rate": 1.6463414634146345e-06, |
| "loss": 0.2784, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.034161964312948, |
| "grad_norm": 0.30238990509268004, |
| "learning_rate": 1.6768292682926832e-06, |
| "loss": 0.3057, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.034771999389964926, |
| "grad_norm": 0.23644592363288297, |
| "learning_rate": 1.707317073170732e-06, |
| "loss": 0.2666, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.03538203446698185, |
| "grad_norm": 0.24896950567681486, |
| "learning_rate": 1.7378048780487804e-06, |
| "loss": 0.3081, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.03599206954399878, |
| "grad_norm": 0.24000954305492433, |
| "learning_rate": 1.7682926829268294e-06, |
| "loss": 0.2852, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.03660210462101571, |
| "grad_norm": 0.23441121755488187, |
| "learning_rate": 1.7987804878048781e-06, |
| "loss": 0.297, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.03721213969803264, |
| "grad_norm": 0.22232489603709266, |
| "learning_rate": 1.8292682926829268e-06, |
| "loss": 0.2634, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.037822174775049566, |
| "grad_norm": 0.24052014046880021, |
| "learning_rate": 1.8597560975609758e-06, |
| "loss": 0.2977, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.038432209852066496, |
| "grad_norm": 0.23679217772055688, |
| "learning_rate": 1.8902439024390245e-06, |
| "loss": 0.3012, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.039042244929083425, |
| "grad_norm": 0.2223272566001243, |
| "learning_rate": 1.9207317073170733e-06, |
| "loss": 0.2738, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.03965228000610035, |
| "grad_norm": 0.22669213310403505, |
| "learning_rate": 1.951219512195122e-06, |
| "loss": 0.2846, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.04026231508311728, |
| "grad_norm": 0.2414151537454095, |
| "learning_rate": 1.981707317073171e-06, |
| "loss": 0.303, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.040872350160134206, |
| "grad_norm": 0.2370672126840143, |
| "learning_rate": 2.0121951219512197e-06, |
| "loss": 0.2904, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.041482385237151136, |
| "grad_norm": 0.22809840538333365, |
| "learning_rate": 2.042682926829268e-06, |
| "loss": 0.2721, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.042092420314168065, |
| "grad_norm": 0.2272895442779121, |
| "learning_rate": 2.073170731707317e-06, |
| "loss": 0.2707, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.042702455391184994, |
| "grad_norm": 0.24377585427116602, |
| "learning_rate": 2.103658536585366e-06, |
| "loss": 0.303, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.043312490468201924, |
| "grad_norm": 0.2254822602630038, |
| "learning_rate": 2.1341463414634146e-06, |
| "loss": 0.2956, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.04392252554521885, |
| "grad_norm": 0.23412386235006508, |
| "learning_rate": 2.1646341463414635e-06, |
| "loss": 0.2861, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.044532560622235776, |
| "grad_norm": 0.224370421334331, |
| "learning_rate": 2.1951219512195125e-06, |
| "loss": 0.2911, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.045142595699252705, |
| "grad_norm": 0.23593807167360495, |
| "learning_rate": 2.225609756097561e-06, |
| "loss": 0.2875, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.045752630776269634, |
| "grad_norm": 0.22302811027781724, |
| "learning_rate": 2.25609756097561e-06, |
| "loss": 0.296, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.046362665853286564, |
| "grad_norm": 0.22758322152961505, |
| "learning_rate": 2.286585365853659e-06, |
| "loss": 0.283, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.04697270093030349, |
| "grad_norm": 0.21591645504761844, |
| "learning_rate": 2.317073170731708e-06, |
| "loss": 0.2882, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.04758273600732042, |
| "grad_norm": 0.21879095336822568, |
| "learning_rate": 2.3475609756097563e-06, |
| "loss": 0.257, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.04819277108433735, |
| "grad_norm": 0.22323134239569448, |
| "learning_rate": 2.378048780487805e-06, |
| "loss": 0.2727, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.04880280616135428, |
| "grad_norm": 0.224943076374116, |
| "learning_rate": 2.408536585365854e-06, |
| "loss": 0.3018, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.049412841238371204, |
| "grad_norm": 0.23309225935335282, |
| "learning_rate": 2.4390243902439027e-06, |
| "loss": 0.2896, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.05002287631538813, |
| "grad_norm": 0.21422363034398334, |
| "learning_rate": 2.4695121951219513e-06, |
| "loss": 0.2568, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.05063291139240506, |
| "grad_norm": 0.23006182370727415, |
| "learning_rate": 2.5e-06, |
| "loss": 0.2893, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.05124294646942199, |
| "grad_norm": 0.23133424818577172, |
| "learning_rate": 2.530487804878049e-06, |
| "loss": 0.2776, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.05185298154643892, |
| "grad_norm": 0.22007966657198397, |
| "learning_rate": 2.5609756097560977e-06, |
| "loss": 0.2941, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.05246301662345585, |
| "grad_norm": 0.22950578198182395, |
| "learning_rate": 2.5914634146341466e-06, |
| "loss": 0.2903, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.05307305170047278, |
| "grad_norm": 0.2119291405117646, |
| "learning_rate": 2.6219512195121956e-06, |
| "loss": 0.289, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.0536830867774897, |
| "grad_norm": 0.23230876607332274, |
| "learning_rate": 2.652439024390244e-06, |
| "loss": 0.2827, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.05429312185450663, |
| "grad_norm": 0.22839417035165244, |
| "learning_rate": 2.682926829268293e-06, |
| "loss": 0.3045, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.05490315693152356, |
| "grad_norm": 0.22634052130525656, |
| "learning_rate": 2.713414634146342e-06, |
| "loss": 0.2724, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.05551319200854049, |
| "grad_norm": 0.21975015253358074, |
| "learning_rate": 2.7439024390243905e-06, |
| "loss": 0.2723, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.05612322708555742, |
| "grad_norm": 0.225336062841578, |
| "learning_rate": 2.7743902439024394e-06, |
| "loss": 0.2998, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.05673326216257435, |
| "grad_norm": 0.21097673082672147, |
| "learning_rate": 2.8048780487804884e-06, |
| "loss": 0.2827, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.05734329723959128, |
| "grad_norm": 0.21759463074705687, |
| "learning_rate": 2.8353658536585365e-06, |
| "loss": 0.2841, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.05795333231660821, |
| "grad_norm": 0.23262389502742112, |
| "learning_rate": 2.8658536585365854e-06, |
| "loss": 0.3015, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.05856336739362513, |
| "grad_norm": 0.21924384664120367, |
| "learning_rate": 2.8963414634146343e-06, |
| "loss": 0.2772, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.05917340247064206, |
| "grad_norm": 0.2257203912109911, |
| "learning_rate": 2.926829268292683e-06, |
| "loss": 0.2727, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.05978343754765899, |
| "grad_norm": 0.2271960932362661, |
| "learning_rate": 2.957317073170732e-06, |
| "loss": 0.2792, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.06039347262467592, |
| "grad_norm": 0.21768279717594183, |
| "learning_rate": 2.9878048780487808e-06, |
| "loss": 0.2804, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.06100350770169285, |
| "grad_norm": 0.2233152083910907, |
| "learning_rate": 3.0182926829268293e-06, |
| "loss": 0.2655, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.06161354277870978, |
| "grad_norm": 0.22662101387006744, |
| "learning_rate": 3.0487804878048782e-06, |
| "loss": 0.2703, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.06222357785572671, |
| "grad_norm": 0.22292006274531256, |
| "learning_rate": 3.079268292682927e-06, |
| "loss": 0.2954, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.06283361293274363, |
| "grad_norm": 0.220071896219013, |
| "learning_rate": 3.1097560975609757e-06, |
| "loss": 0.2476, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.06344364800976056, |
| "grad_norm": 0.22535333283145692, |
| "learning_rate": 3.1402439024390246e-06, |
| "loss": 0.2795, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.06405368308677749, |
| "grad_norm": 0.22789495334564042, |
| "learning_rate": 3.1707317073170736e-06, |
| "loss": 0.3072, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.06466371816379442, |
| "grad_norm": 0.22845970490853684, |
| "learning_rate": 3.201219512195122e-06, |
| "loss": 0.2928, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.06527375324081135, |
| "grad_norm": 0.22721669813586723, |
| "learning_rate": 3.231707317073171e-06, |
| "loss": 0.2701, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.06588378831782828, |
| "grad_norm": 0.22501246073953735, |
| "learning_rate": 3.26219512195122e-06, |
| "loss": 0.295, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.0664938233948452, |
| "grad_norm": 0.23994963496074384, |
| "learning_rate": 3.292682926829269e-06, |
| "loss": 0.3106, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.06710385847186214, |
| "grad_norm": 0.21902655359899867, |
| "learning_rate": 3.3231707317073174e-06, |
| "loss": 0.2812, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.06771389354887906, |
| "grad_norm": 0.22318592923675146, |
| "learning_rate": 3.3536585365853664e-06, |
| "loss": 0.2835, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.068323928625896, |
| "grad_norm": 0.26363594205403645, |
| "learning_rate": 3.3841463414634153e-06, |
| "loss": 0.2903, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.06893396370291292, |
| "grad_norm": 0.22377433701651386, |
| "learning_rate": 3.414634146341464e-06, |
| "loss": 0.2743, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.06954399877992985, |
| "grad_norm": 0.23626130625058447, |
| "learning_rate": 3.4451219512195124e-06, |
| "loss": 0.299, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.07015403385694677, |
| "grad_norm": 0.223610053073903, |
| "learning_rate": 3.475609756097561e-06, |
| "loss": 0.2555, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.0707640689339637, |
| "grad_norm": 0.21087836508889907, |
| "learning_rate": 3.50609756097561e-06, |
| "loss": 0.2649, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.07137410401098063, |
| "grad_norm": 0.24915773429575874, |
| "learning_rate": 3.5365853658536588e-06, |
| "loss": 0.2936, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.07198413908799756, |
| "grad_norm": 0.24129416709675272, |
| "learning_rate": 3.5670731707317073e-06, |
| "loss": 0.2862, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.07259417416501449, |
| "grad_norm": 0.24255820924509042, |
| "learning_rate": 3.5975609756097562e-06, |
| "loss": 0.2816, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.07320420924203141, |
| "grad_norm": 0.22760789782296764, |
| "learning_rate": 3.628048780487805e-06, |
| "loss": 0.281, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.07381424431904834, |
| "grad_norm": 0.2435576310663892, |
| "learning_rate": 3.6585365853658537e-06, |
| "loss": 0.29, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.07442427939606527, |
| "grad_norm": 0.23105027146598722, |
| "learning_rate": 3.6890243902439026e-06, |
| "loss": 0.2801, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.0750343144730822, |
| "grad_norm": 0.23364310548660652, |
| "learning_rate": 3.7195121951219516e-06, |
| "loss": 0.266, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.07564434955009913, |
| "grad_norm": 0.24832037252957492, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.306, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.07625438462711606, |
| "grad_norm": 0.2357002851720913, |
| "learning_rate": 3.780487804878049e-06, |
| "loss": 0.2933, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.07686441970413299, |
| "grad_norm": 0.22556207921999175, |
| "learning_rate": 3.810975609756098e-06, |
| "loss": 0.2841, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.07747445478114992, |
| "grad_norm": 0.24161879175738435, |
| "learning_rate": 3.8414634146341465e-06, |
| "loss": 0.2844, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.07808448985816685, |
| "grad_norm": 0.23782173864569195, |
| "learning_rate": 3.8719512195121954e-06, |
| "loss": 0.285, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.07869452493518378, |
| "grad_norm": 0.22466061236443696, |
| "learning_rate": 3.902439024390244e-06, |
| "loss": 0.2658, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.0793045600122007, |
| "grad_norm": 0.21752730672405302, |
| "learning_rate": 3.932926829268293e-06, |
| "loss": 0.2773, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.07991459508921762, |
| "grad_norm": 0.23637204821920585, |
| "learning_rate": 3.963414634146342e-06, |
| "loss": 0.2775, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.08052463016623455, |
| "grad_norm": 0.22799647607474074, |
| "learning_rate": 3.99390243902439e-06, |
| "loss": 0.2636, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.08113466524325148, |
| "grad_norm": 0.22714228884278848, |
| "learning_rate": 4.024390243902439e-06, |
| "loss": 0.2615, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.08174470032026841, |
| "grad_norm": 0.21398203363805257, |
| "learning_rate": 4.054878048780488e-06, |
| "loss": 0.2507, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.08235473539728534, |
| "grad_norm": 0.2264462526855402, |
| "learning_rate": 4.085365853658536e-06, |
| "loss": 0.2691, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.08296477047430227, |
| "grad_norm": 0.23753082090613722, |
| "learning_rate": 4.115853658536585e-06, |
| "loss": 0.2824, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.0835748055513192, |
| "grad_norm": 0.23304073294964817, |
| "learning_rate": 4.146341463414634e-06, |
| "loss": 0.2826, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.08418484062833613, |
| "grad_norm": 0.23595581075210828, |
| "learning_rate": 4.176829268292683e-06, |
| "loss": 0.2916, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.08479487570535306, |
| "grad_norm": 0.22041886879240966, |
| "learning_rate": 4.207317073170732e-06, |
| "loss": 0.2369, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.08540491078236999, |
| "grad_norm": 0.24206221610814158, |
| "learning_rate": 4.237804878048781e-06, |
| "loss": 0.2821, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.08601494585938692, |
| "grad_norm": 0.2393670152036277, |
| "learning_rate": 4.268292682926829e-06, |
| "loss": 0.2791, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.08662498093640385, |
| "grad_norm": 0.23189352334620775, |
| "learning_rate": 4.298780487804878e-06, |
| "loss": 0.2919, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.08723501601342078, |
| "grad_norm": 0.242567561823643, |
| "learning_rate": 4.329268292682927e-06, |
| "loss": 0.2831, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.0878450510904377, |
| "grad_norm": 0.23058885789044348, |
| "learning_rate": 4.359756097560976e-06, |
| "loss": 0.269, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.08845508616745462, |
| "grad_norm": 0.22276270950122776, |
| "learning_rate": 4.390243902439025e-06, |
| "loss": 0.2517, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.08906512124447155, |
| "grad_norm": 0.23742463465017807, |
| "learning_rate": 4.420731707317074e-06, |
| "loss": 0.284, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.08967515632148848, |
| "grad_norm": 0.2331170754231291, |
| "learning_rate": 4.451219512195122e-06, |
| "loss": 0.2624, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.09028519139850541, |
| "grad_norm": 0.2308037721395711, |
| "learning_rate": 4.481707317073171e-06, |
| "loss": 0.2539, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.09089522647552234, |
| "grad_norm": 0.22849284994691954, |
| "learning_rate": 4.51219512195122e-06, |
| "loss": 0.2826, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.09150526155253927, |
| "grad_norm": 0.22290125661574434, |
| "learning_rate": 4.542682926829269e-06, |
| "loss": 0.2834, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.0921152966295562, |
| "grad_norm": 0.22294802055510163, |
| "learning_rate": 4.573170731707318e-06, |
| "loss": 0.2664, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.09272533170657313, |
| "grad_norm": 0.22216693279188238, |
| "learning_rate": 4.603658536585367e-06, |
| "loss": 0.2722, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.09333536678359006, |
| "grad_norm": 0.22847119099300714, |
| "learning_rate": 4.634146341463416e-06, |
| "loss": 0.3077, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.09394540186060699, |
| "grad_norm": 0.22825237894656306, |
| "learning_rate": 4.664634146341464e-06, |
| "loss": 0.279, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.09455543693762392, |
| "grad_norm": 0.2265738168314992, |
| "learning_rate": 4.695121951219513e-06, |
| "loss": 0.2743, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.09516547201464085, |
| "grad_norm": 0.2379254364113949, |
| "learning_rate": 4.725609756097561e-06, |
| "loss": 0.2821, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.09577550709165777, |
| "grad_norm": 0.2533303804327538, |
| "learning_rate": 4.75609756097561e-06, |
| "loss": 0.2812, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.0963855421686747, |
| "grad_norm": 0.21582395227434673, |
| "learning_rate": 4.786585365853659e-06, |
| "loss": 0.2557, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.09699557724569163, |
| "grad_norm": 0.23974503879223094, |
| "learning_rate": 4.817073170731708e-06, |
| "loss": 0.2842, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.09760561232270856, |
| "grad_norm": 0.23764839363381726, |
| "learning_rate": 4.8475609756097565e-06, |
| "loss": 0.2903, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.09821564739972548, |
| "grad_norm": 0.2340765128011826, |
| "learning_rate": 4.8780487804878055e-06, |
| "loss": 0.2749, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.09882568247674241, |
| "grad_norm": 0.21460415213463546, |
| "learning_rate": 4.908536585365854e-06, |
| "loss": 0.2611, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.09943571755375934, |
| "grad_norm": 0.24181561044684743, |
| "learning_rate": 4.9390243902439025e-06, |
| "loss": 0.2844, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.10004575263077627, |
| "grad_norm": 0.2405356599063858, |
| "learning_rate": 4.9695121951219515e-06, |
| "loss": 0.2965, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.1006557877077932, |
| "grad_norm": 0.21661402310556976, |
| "learning_rate": 5e-06, |
| "loss": 0.2464, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.10126582278481013, |
| "grad_norm": 0.2262455083804811, |
| "learning_rate": 5.030487804878049e-06, |
| "loss": 0.273, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.10187585786182705, |
| "grad_norm": 0.2365786862729776, |
| "learning_rate": 5.060975609756098e-06, |
| "loss": 0.3042, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.10248589293884398, |
| "grad_norm": 0.23673959129795522, |
| "learning_rate": 5.091463414634147e-06, |
| "loss": 0.2742, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.10309592801586091, |
| "grad_norm": 0.25419425400530005, |
| "learning_rate": 5.121951219512195e-06, |
| "loss": 0.3005, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.10370596309287784, |
| "grad_norm": 0.2419749655946345, |
| "learning_rate": 5.152439024390244e-06, |
| "loss": 0.2885, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.10431599816989477, |
| "grad_norm": 0.21393573151462877, |
| "learning_rate": 5.182926829268293e-06, |
| "loss": 0.264, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.1049260332469117, |
| "grad_norm": 0.2362091062195042, |
| "learning_rate": 5.213414634146342e-06, |
| "loss": 0.2585, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.10553606832392863, |
| "grad_norm": 0.2704586811251837, |
| "learning_rate": 5.243902439024391e-06, |
| "loss": 0.2778, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.10614610340094556, |
| "grad_norm": 0.2307320156865811, |
| "learning_rate": 5.27439024390244e-06, |
| "loss": 0.2724, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.10675613847796249, |
| "grad_norm": 0.24480944164566806, |
| "learning_rate": 5.304878048780488e-06, |
| "loss": 0.2793, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.1073661735549794, |
| "grad_norm": 0.24120432626907268, |
| "learning_rate": 5.335365853658537e-06, |
| "loss": 0.2594, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.10797620863199633, |
| "grad_norm": 0.24750812136821748, |
| "learning_rate": 5.365853658536586e-06, |
| "loss": 0.2827, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.10858624370901326, |
| "grad_norm": 0.24013243345908472, |
| "learning_rate": 5.396341463414635e-06, |
| "loss": 0.2573, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.1091962787860302, |
| "grad_norm": 0.2609299332154205, |
| "learning_rate": 5.426829268292684e-06, |
| "loss": 0.2874, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.10980631386304712, |
| "grad_norm": 0.26281954073730773, |
| "learning_rate": 5.457317073170733e-06, |
| "loss": 0.3054, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.11041634894006405, |
| "grad_norm": 0.24458036151237894, |
| "learning_rate": 5.487804878048781e-06, |
| "loss": 0.2805, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.11102638401708098, |
| "grad_norm": 0.24512128267371772, |
| "learning_rate": 5.51829268292683e-06, |
| "loss": 0.2914, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.11163641909409791, |
| "grad_norm": 0.23669169587728295, |
| "learning_rate": 5.548780487804879e-06, |
| "loss": 0.2786, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.11224645417111484, |
| "grad_norm": 0.24697726917520454, |
| "learning_rate": 5.579268292682928e-06, |
| "loss": 0.2767, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.11285648924813177, |
| "grad_norm": 0.249714640479248, |
| "learning_rate": 5.609756097560977e-06, |
| "loss": 0.2685, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.1134665243251487, |
| "grad_norm": 0.24841021945031513, |
| "learning_rate": 5.640243902439024e-06, |
| "loss": 0.2649, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.11407655940216563, |
| "grad_norm": 0.23859920700579967, |
| "learning_rate": 5.670731707317073e-06, |
| "loss": 0.2858, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.11468659447918256, |
| "grad_norm": 0.2584753235599307, |
| "learning_rate": 5.701219512195122e-06, |
| "loss": 0.2747, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.11529662955619949, |
| "grad_norm": 0.23156317539426982, |
| "learning_rate": 5.731707317073171e-06, |
| "loss": 0.2529, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.11590666463321642, |
| "grad_norm": 0.22687147729532126, |
| "learning_rate": 5.76219512195122e-06, |
| "loss": 0.2826, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.11651669971023333, |
| "grad_norm": 0.25517296147028024, |
| "learning_rate": 5.792682926829269e-06, |
| "loss": 0.2559, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.11712673478725026, |
| "grad_norm": 0.2473059947585708, |
| "learning_rate": 5.823170731707317e-06, |
| "loss": 0.2617, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.11773676986426719, |
| "grad_norm": 0.2475596917448232, |
| "learning_rate": 5.853658536585366e-06, |
| "loss": 0.2757, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.11834680494128412, |
| "grad_norm": 0.24485621157389584, |
| "learning_rate": 5.884146341463415e-06, |
| "loss": 0.2801, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.11895684001830105, |
| "grad_norm": 0.2407535109141067, |
| "learning_rate": 5.914634146341464e-06, |
| "loss": 0.2541, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.11956687509531798, |
| "grad_norm": 0.29475882116862984, |
| "learning_rate": 5.9451219512195126e-06, |
| "loss": 0.2672, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.12017691017233491, |
| "grad_norm": 0.24490394924599188, |
| "learning_rate": 5.9756097560975615e-06, |
| "loss": 0.265, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.12078694524935184, |
| "grad_norm": 0.24048637934439834, |
| "learning_rate": 6.0060975609756104e-06, |
| "loss": 0.2616, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.12139698032636877, |
| "grad_norm": 0.2449235795372263, |
| "learning_rate": 6.0365853658536585e-06, |
| "loss": 0.2771, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.1220070154033857, |
| "grad_norm": 0.24010278455894474, |
| "learning_rate": 6.0670731707317075e-06, |
| "loss": 0.2487, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.12261705048040263, |
| "grad_norm": 0.2501744987758223, |
| "learning_rate": 6.0975609756097564e-06, |
| "loss": 0.2769, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.12322708555741956, |
| "grad_norm": 0.24813908759409767, |
| "learning_rate": 6.128048780487805e-06, |
| "loss": 0.2705, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.12383712063443648, |
| "grad_norm": 0.24533160040379387, |
| "learning_rate": 6.158536585365854e-06, |
| "loss": 0.2815, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.12444715571145341, |
| "grad_norm": 0.2437938947166487, |
| "learning_rate": 6.189024390243903e-06, |
| "loss": 0.2521, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.12505719078847033, |
| "grad_norm": 0.26225596287555486, |
| "learning_rate": 6.219512195121951e-06, |
| "loss": 0.2793, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.12566722586548726, |
| "grad_norm": 0.2617035107047438, |
| "learning_rate": 6.25e-06, |
| "loss": 0.2868, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.1262772609425042, |
| "grad_norm": 0.22489028420899687, |
| "learning_rate": 6.280487804878049e-06, |
| "loss": 0.2502, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.12688729601952112, |
| "grad_norm": 0.23908878727484426, |
| "learning_rate": 6.310975609756098e-06, |
| "loss": 0.2842, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.12749733109653805, |
| "grad_norm": 0.24229667129434546, |
| "learning_rate": 6.341463414634147e-06, |
| "loss": 0.2729, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.12810736617355498, |
| "grad_norm": 0.23997663988223025, |
| "learning_rate": 6.371951219512196e-06, |
| "loss": 0.2469, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.1287174012505719, |
| "grad_norm": 0.24052895148755218, |
| "learning_rate": 6.402439024390244e-06, |
| "loss": 0.2767, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.12932743632758884, |
| "grad_norm": 0.24249027732334763, |
| "learning_rate": 6.432926829268293e-06, |
| "loss": 0.2458, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.12993747140460576, |
| "grad_norm": 0.25137563661292134, |
| "learning_rate": 6.463414634146342e-06, |
| "loss": 0.273, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.1305475064816227, |
| "grad_norm": 0.2587508767905881, |
| "learning_rate": 6.493902439024391e-06, |
| "loss": 0.2677, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.13115754155863962, |
| "grad_norm": 0.2436596949895466, |
| "learning_rate": 6.52439024390244e-06, |
| "loss": 0.2576, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.13176757663565655, |
| "grad_norm": 0.24764204486776847, |
| "learning_rate": 6.554878048780489e-06, |
| "loss": 0.2621, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.13237761171267348, |
| "grad_norm": 0.25726179008925426, |
| "learning_rate": 6.585365853658538e-06, |
| "loss": 0.2757, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.1329876467896904, |
| "grad_norm": 0.2569737346876388, |
| "learning_rate": 6.615853658536586e-06, |
| "loss": 0.2686, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.13359768186670734, |
| "grad_norm": 0.24488929677587262, |
| "learning_rate": 6.646341463414635e-06, |
| "loss": 0.2703, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.13420771694372427, |
| "grad_norm": 0.2625226665018392, |
| "learning_rate": 6.676829268292684e-06, |
| "loss": 0.2536, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.1348177520207412, |
| "grad_norm": 0.32829586478398526, |
| "learning_rate": 6.707317073170733e-06, |
| "loss": 0.2851, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.13542778709775813, |
| "grad_norm": 0.24381114982471824, |
| "learning_rate": 6.737804878048782e-06, |
| "loss": 0.2725, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.13603782217477506, |
| "grad_norm": 0.2537539468384488, |
| "learning_rate": 6.768292682926831e-06, |
| "loss": 0.2702, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.136647857251792, |
| "grad_norm": 0.24029077547019123, |
| "learning_rate": 6.798780487804879e-06, |
| "loss": 0.2506, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.13725789232880892, |
| "grad_norm": 0.25072886281019574, |
| "learning_rate": 6.829268292682928e-06, |
| "loss": 0.2754, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.13786792740582585, |
| "grad_norm": 0.2622525846487408, |
| "learning_rate": 6.859756097560977e-06, |
| "loss": 0.275, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.13847796248284278, |
| "grad_norm": 0.27373368686851934, |
| "learning_rate": 6.890243902439025e-06, |
| "loss": 0.2584, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.1390879975598597, |
| "grad_norm": 0.28138028268480203, |
| "learning_rate": 6.920731707317073e-06, |
| "loss": 0.273, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.1396980326368766, |
| "grad_norm": 0.2506330599580721, |
| "learning_rate": 6.951219512195122e-06, |
| "loss": 0.281, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.14030806771389354, |
| "grad_norm": 0.26945292402465437, |
| "learning_rate": 6.981707317073171e-06, |
| "loss": 0.2584, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.14091810279091047, |
| "grad_norm": 0.25764293041454384, |
| "learning_rate": 7.01219512195122e-06, |
| "loss": 0.2693, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.1415281378679274, |
| "grad_norm": 0.24772244759620793, |
| "learning_rate": 7.042682926829269e-06, |
| "loss": 0.2831, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.14213817294494432, |
| "grad_norm": 0.24429762825269066, |
| "learning_rate": 7.0731707317073175e-06, |
| "loss": 0.2669, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.14274820802196125, |
| "grad_norm": 0.2341275505054125, |
| "learning_rate": 7.1036585365853665e-06, |
| "loss": 0.2627, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.14335824309897818, |
| "grad_norm": 0.2516323459329928, |
| "learning_rate": 7.1341463414634146e-06, |
| "loss": 0.2486, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.1439682781759951, |
| "grad_norm": 0.24792052063072995, |
| "learning_rate": 7.1646341463414635e-06, |
| "loss": 0.2571, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.14457831325301204, |
| "grad_norm": 0.24838425917856113, |
| "learning_rate": 7.1951219512195125e-06, |
| "loss": 0.2565, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.14518834833002897, |
| "grad_norm": 0.2558154886637839, |
| "learning_rate": 7.225609756097561e-06, |
| "loss": 0.2602, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.1457983834070459, |
| "grad_norm": 0.24095801169637035, |
| "learning_rate": 7.25609756097561e-06, |
| "loss": 0.276, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.14640841848406283, |
| "grad_norm": 0.24456191278684733, |
| "learning_rate": 7.286585365853659e-06, |
| "loss": 0.2612, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.14701845356107976, |
| "grad_norm": 0.24059500548054316, |
| "learning_rate": 7.317073170731707e-06, |
| "loss": 0.2528, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.1476284886380967, |
| "grad_norm": 0.24031870127070679, |
| "learning_rate": 7.347560975609756e-06, |
| "loss": 0.257, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.14823852371511362, |
| "grad_norm": 0.2928644520428681, |
| "learning_rate": 7.378048780487805e-06, |
| "loss": 0.2973, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.14884855879213055, |
| "grad_norm": 0.2436600942196456, |
| "learning_rate": 7.408536585365854e-06, |
| "loss": 0.2527, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.14945859386914748, |
| "grad_norm": 0.24910229884619056, |
| "learning_rate": 7.439024390243903e-06, |
| "loss": 0.2573, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.1500686289461644, |
| "grad_norm": 0.257984994933436, |
| "learning_rate": 7.469512195121952e-06, |
| "loss": 0.2854, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.15067866402318134, |
| "grad_norm": 0.26224460707949426, |
| "learning_rate": 7.500000000000001e-06, |
| "loss": 0.2462, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.15128869910019827, |
| "grad_norm": 0.23911637788829992, |
| "learning_rate": 7.530487804878049e-06, |
| "loss": 0.258, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.1518987341772152, |
| "grad_norm": 0.26106166538596853, |
| "learning_rate": 7.560975609756098e-06, |
| "loss": 0.2395, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.15250876925423212, |
| "grad_norm": 0.2628905138359254, |
| "learning_rate": 7.591463414634147e-06, |
| "loss": 0.2557, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.15311880433124905, |
| "grad_norm": 0.24772668368639703, |
| "learning_rate": 7.621951219512196e-06, |
| "loss": 0.2612, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.15372883940826598, |
| "grad_norm": 0.2614036863610298, |
| "learning_rate": 7.652439024390244e-06, |
| "loss": 0.2418, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.1543388744852829, |
| "grad_norm": 0.2533666402036696, |
| "learning_rate": 7.682926829268293e-06, |
| "loss": 0.2676, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.15494890956229984, |
| "grad_norm": 0.2646349439683426, |
| "learning_rate": 7.713414634146342e-06, |
| "loss": 0.2807, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.15555894463931677, |
| "grad_norm": 0.25217251940804086, |
| "learning_rate": 7.743902439024391e-06, |
| "loss": 0.2648, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.1561689797163337, |
| "grad_norm": 0.2934490177685301, |
| "learning_rate": 7.77439024390244e-06, |
| "loss": 0.242, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.15677901479335063, |
| "grad_norm": 0.23774700056796438, |
| "learning_rate": 7.804878048780489e-06, |
| "loss": 0.2628, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.15738904987036756, |
| "grad_norm": 0.2794132704089381, |
| "learning_rate": 7.835365853658538e-06, |
| "loss": 0.2794, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.1579990849473845, |
| "grad_norm": 0.24213388227268773, |
| "learning_rate": 7.865853658536587e-06, |
| "loss": 0.2647, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.1586091200244014, |
| "grad_norm": 0.25082026136954216, |
| "learning_rate": 7.896341463414636e-06, |
| "loss": 0.2637, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.15921915510141832, |
| "grad_norm": 0.2431946921818769, |
| "learning_rate": 7.926829268292685e-06, |
| "loss": 0.2626, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.15982919017843525, |
| "grad_norm": 0.2442925275620386, |
| "learning_rate": 7.957317073170733e-06, |
| "loss": 0.2614, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.16043922525545218, |
| "grad_norm": 0.258570314023015, |
| "learning_rate": 7.98780487804878e-06, |
| "loss": 0.2598, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.1610492603324691, |
| "grad_norm": 0.24980544348638753, |
| "learning_rate": 8.01829268292683e-06, |
| "loss": 0.2682, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.16165929540948604, |
| "grad_norm": 0.22851755945890959, |
| "learning_rate": 8.048780487804879e-06, |
| "loss": 0.2383, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.16226933048650297, |
| "grad_norm": 0.2742225825226428, |
| "learning_rate": 8.079268292682928e-06, |
| "loss": 0.2613, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.1628793655635199, |
| "grad_norm": 0.2363713257999274, |
| "learning_rate": 8.109756097560977e-06, |
| "loss": 0.2444, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.16348940064053682, |
| "grad_norm": 0.28248352582246355, |
| "learning_rate": 8.140243902439024e-06, |
| "loss": 0.2696, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.16409943571755375, |
| "grad_norm": 0.25728717998996453, |
| "learning_rate": 8.170731707317073e-06, |
| "loss": 0.2773, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.16470947079457068, |
| "grad_norm": 0.3007522019243012, |
| "learning_rate": 8.201219512195122e-06, |
| "loss": 0.2508, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.1653195058715876, |
| "grad_norm": 0.2640301074207759, |
| "learning_rate": 8.23170731707317e-06, |
| "loss": 0.2726, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.16592954094860454, |
| "grad_norm": 0.27589684620981636, |
| "learning_rate": 8.26219512195122e-06, |
| "loss": 0.2594, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.16653957602562147, |
| "grad_norm": 0.2603256028043959, |
| "learning_rate": 8.292682926829268e-06, |
| "loss": 0.2548, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.1671496111026384, |
| "grad_norm": 0.2543287160421388, |
| "learning_rate": 8.323170731707317e-06, |
| "loss": 0.2639, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.16775964617965533, |
| "grad_norm": 0.2663572684486127, |
| "learning_rate": 8.353658536585366e-06, |
| "loss": 0.2734, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.16836968125667226, |
| "grad_norm": 0.24382846053208523, |
| "learning_rate": 8.384146341463415e-06, |
| "loss": 0.2567, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.1689797163336892, |
| "grad_norm": 0.31604403568590417, |
| "learning_rate": 8.414634146341464e-06, |
| "loss": 0.2672, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.16958975141070612, |
| "grad_norm": 0.27720443335337075, |
| "learning_rate": 8.445121951219513e-06, |
| "loss": 0.2584, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.17019978648772305, |
| "grad_norm": 0.24158731216102847, |
| "learning_rate": 8.475609756097562e-06, |
| "loss": 0.2545, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.17080982156473998, |
| "grad_norm": 0.26559843371987457, |
| "learning_rate": 8.506097560975611e-06, |
| "loss": 0.2686, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.1714198566417569, |
| "grad_norm": 0.25167456958331147, |
| "learning_rate": 8.536585365853658e-06, |
| "loss": 0.2669, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.17202989171877384, |
| "grad_norm": 0.24990090478397206, |
| "learning_rate": 8.567073170731707e-06, |
| "loss": 0.252, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.17263992679579077, |
| "grad_norm": 0.26565282242517324, |
| "learning_rate": 8.597560975609756e-06, |
| "loss": 0.2612, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.1732499618728077, |
| "grad_norm": 0.2612900682088091, |
| "learning_rate": 8.628048780487805e-06, |
| "loss": 0.2693, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.17385999694982462, |
| "grad_norm": 0.2542685727592583, |
| "learning_rate": 8.658536585365854e-06, |
| "loss": 0.2497, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.17447003202684155, |
| "grad_norm": 0.2581637078007705, |
| "learning_rate": 8.689024390243903e-06, |
| "loss": 0.2625, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.17508006710385848, |
| "grad_norm": 0.26869785766327753, |
| "learning_rate": 8.719512195121952e-06, |
| "loss": 0.2854, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.1756901021808754, |
| "grad_norm": 0.2522640376457165, |
| "learning_rate": 8.750000000000001e-06, |
| "loss": 0.2517, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.17630013725789234, |
| "grad_norm": 0.2514912970501586, |
| "learning_rate": 8.78048780487805e-06, |
| "loss": 0.2643, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.17691017233490924, |
| "grad_norm": 0.2670649726886704, |
| "learning_rate": 8.810975609756099e-06, |
| "loss": 0.2691, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.17752020741192617, |
| "grad_norm": 0.2532280359730121, |
| "learning_rate": 8.841463414634148e-06, |
| "loss": 0.2766, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.1781302424889431, |
| "grad_norm": 0.23982871280272544, |
| "learning_rate": 8.871951219512197e-06, |
| "loss": 0.2623, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.17874027756596003, |
| "grad_norm": 0.22291820269074192, |
| "learning_rate": 8.902439024390244e-06, |
| "loss": 0.2542, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.17935031264297696, |
| "grad_norm": 0.24373539859994214, |
| "learning_rate": 8.932926829268293e-06, |
| "loss": 0.275, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.1799603477199939, |
| "grad_norm": 0.251820372702038, |
| "learning_rate": 8.963414634146342e-06, |
| "loss": 0.2699, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.18057038279701082, |
| "grad_norm": 0.2341623068040451, |
| "learning_rate": 8.99390243902439e-06, |
| "loss": 0.2503, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.18118041787402775, |
| "grad_norm": 0.23554175402707625, |
| "learning_rate": 9.02439024390244e-06, |
| "loss": 0.2423, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.18179045295104468, |
| "grad_norm": 0.24651028549190948, |
| "learning_rate": 9.054878048780489e-06, |
| "loss": 0.2501, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.1824004880280616, |
| "grad_norm": 0.23298368631390087, |
| "learning_rate": 9.085365853658538e-06, |
| "loss": 0.2486, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.18301052310507854, |
| "grad_norm": 0.25387542394056234, |
| "learning_rate": 9.115853658536587e-06, |
| "loss": 0.2554, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.18362055818209547, |
| "grad_norm": 0.2743582744551358, |
| "learning_rate": 9.146341463414635e-06, |
| "loss": 0.3009, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.1842305932591124, |
| "grad_norm": 0.23267894105241863, |
| "learning_rate": 9.176829268292684e-06, |
| "loss": 0.2578, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.18484062833612933, |
| "grad_norm": 0.23211732643003208, |
| "learning_rate": 9.207317073170733e-06, |
| "loss": 0.245, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.18545066341314626, |
| "grad_norm": 0.27622864545287684, |
| "learning_rate": 9.237804878048782e-06, |
| "loss": 0.2789, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.18606069849016318, |
| "grad_norm": 0.24220813262242386, |
| "learning_rate": 9.268292682926831e-06, |
| "loss": 0.2517, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.1866707335671801, |
| "grad_norm": 0.24023476810215502, |
| "learning_rate": 9.298780487804879e-06, |
| "loss": 0.2671, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.18728076864419704, |
| "grad_norm": 0.25961081713489753, |
| "learning_rate": 9.329268292682927e-06, |
| "loss": 0.2639, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.18789080372121397, |
| "grad_norm": 0.2561706447722481, |
| "learning_rate": 9.359756097560976e-06, |
| "loss": 0.264, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.1885008387982309, |
| "grad_norm": 0.24437473491122833, |
| "learning_rate": 9.390243902439025e-06, |
| "loss": 0.2604, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.18911087387524783, |
| "grad_norm": 0.2582368491714471, |
| "learning_rate": 9.420731707317073e-06, |
| "loss": 0.2438, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.18972090895226476, |
| "grad_norm": 0.24897032920997206, |
| "learning_rate": 9.451219512195122e-06, |
| "loss": 0.2579, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.1903309440292817, |
| "grad_norm": 0.26649069100002015, |
| "learning_rate": 9.48170731707317e-06, |
| "loss": 0.2669, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.19094097910629862, |
| "grad_norm": 0.2648892505808558, |
| "learning_rate": 9.51219512195122e-06, |
| "loss": 0.2696, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.19155101418331555, |
| "grad_norm": 0.23334740105373933, |
| "learning_rate": 9.542682926829268e-06, |
| "loss": 0.2448, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.19216104926033248, |
| "grad_norm": 0.246437572595513, |
| "learning_rate": 9.573170731707317e-06, |
| "loss": 0.2495, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.1927710843373494, |
| "grad_norm": 0.2544520516547441, |
| "learning_rate": 9.603658536585366e-06, |
| "loss": 0.2731, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.19338111941436634, |
| "grad_norm": 0.2588261171385072, |
| "learning_rate": 9.634146341463415e-06, |
| "loss": 0.2502, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.19399115449138327, |
| "grad_norm": 0.247020197460797, |
| "learning_rate": 9.664634146341464e-06, |
| "loss": 0.2431, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.1946011895684002, |
| "grad_norm": 0.2697722255574314, |
| "learning_rate": 9.695121951219513e-06, |
| "loss": 0.2861, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.19521122464541713, |
| "grad_norm": 0.25055251104844295, |
| "learning_rate": 9.725609756097562e-06, |
| "loss": 0.2634, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.19582125972243403, |
| "grad_norm": 0.25664030673072813, |
| "learning_rate": 9.756097560975611e-06, |
| "loss": 0.2594, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.19643129479945096, |
| "grad_norm": 0.2568807633504417, |
| "learning_rate": 9.78658536585366e-06, |
| "loss": 0.2602, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.19704132987646789, |
| "grad_norm": 0.30938855639166934, |
| "learning_rate": 9.817073170731707e-06, |
| "loss": 0.2646, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.19765136495348481, |
| "grad_norm": 0.25640607299564616, |
| "learning_rate": 9.847560975609756e-06, |
| "loss": 0.2557, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.19826140003050174, |
| "grad_norm": 0.2755277016357419, |
| "learning_rate": 9.878048780487805e-06, |
| "loss": 0.2676, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.19887143510751867, |
| "grad_norm": 0.2828434225373175, |
| "learning_rate": 9.908536585365854e-06, |
| "loss": 0.278, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.1994814701845356, |
| "grad_norm": 0.25703933013169117, |
| "learning_rate": 9.939024390243903e-06, |
| "loss": 0.252, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.20009150526155253, |
| "grad_norm": 0.23354984579219248, |
| "learning_rate": 9.969512195121952e-06, |
| "loss": 0.2464, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.20070154033856946, |
| "grad_norm": 0.26361369945265395, |
| "learning_rate": 1e-05, |
| "loss": 0.2502, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.2013115754155864, |
| "grad_norm": 0.2507652167428992, |
| "learning_rate": 9.999997168562324e-06, |
| "loss": 0.2673, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.20192161049260332, |
| "grad_norm": 0.27304236135561094, |
| "learning_rate": 9.9999886742525e-06, |
| "loss": 0.2598, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.20253164556962025, |
| "grad_norm": 0.2287359091408107, |
| "learning_rate": 9.99997451708015e-06, |
| "loss": 0.2249, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.20314168064663718, |
| "grad_norm": 0.2673246266757542, |
| "learning_rate": 9.999954697061305e-06, |
| "loss": 0.2487, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.2037517157236541, |
| "grad_norm": 0.25713057306638515, |
| "learning_rate": 9.999929214218419e-06, |
| "loss": 0.2596, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.20436175080067104, |
| "grad_norm": 0.24997141485448354, |
| "learning_rate": 9.999898068580346e-06, |
| "loss": 0.2632, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.20497178587768797, |
| "grad_norm": 0.26986376068528667, |
| "learning_rate": 9.999861260182366e-06, |
| "loss": 0.2508, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.2055818209547049, |
| "grad_norm": 0.25089494428271947, |
| "learning_rate": 9.999818789066164e-06, |
| "loss": 0.262, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.20619185603172183, |
| "grad_norm": 0.26370819630042563, |
| "learning_rate": 9.999770655279843e-06, |
| "loss": 0.2579, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.20680189110873876, |
| "grad_norm": 0.2951528596227915, |
| "learning_rate": 9.99971685887792e-06, |
| "loss": 0.2843, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.20741192618575569, |
| "grad_norm": 0.25114828422920793, |
| "learning_rate": 9.999657399921321e-06, |
| "loss": 0.2701, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.20802196126277261, |
| "grad_norm": 0.2542038881881685, |
| "learning_rate": 9.999592278477389e-06, |
| "loss": 0.264, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.20863199633978954, |
| "grad_norm": 0.2783939826492031, |
| "learning_rate": 9.999521494619876e-06, |
| "loss": 0.2705, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.20924203141680647, |
| "grad_norm": 0.2387066722281608, |
| "learning_rate": 9.999445048428952e-06, |
| "loss": 0.261, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.2098520664938234, |
| "grad_norm": 0.2577640282603642, |
| "learning_rate": 9.999362939991202e-06, |
| "loss": 0.2521, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.21046210157084033, |
| "grad_norm": 0.2277437965064566, |
| "learning_rate": 9.999275169399614e-06, |
| "loss": 0.2401, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.21107213664785726, |
| "grad_norm": 0.2607184798714529, |
| "learning_rate": 9.999181736753598e-06, |
| "loss": 0.2368, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.2116821717248742, |
| "grad_norm": 0.2743794302014217, |
| "learning_rate": 9.999082642158972e-06, |
| "loss": 0.25, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.21229220680189112, |
| "grad_norm": 0.27233210453368745, |
| "learning_rate": 9.99897788572797e-06, |
| "loss": 0.2836, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.21290224187890805, |
| "grad_norm": 0.25517501025766626, |
| "learning_rate": 9.998867467579234e-06, |
| "loss": 0.253, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.21351227695592498, |
| "grad_norm": 0.2579378404488621, |
| "learning_rate": 9.998751387837822e-06, |
| "loss": 0.2616, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.21412231203294188, |
| "grad_norm": 0.2812216022431848, |
| "learning_rate": 9.998629646635203e-06, |
| "loss": 0.2586, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.2147323471099588, |
| "grad_norm": 0.24197851638553763, |
| "learning_rate": 9.99850224410926e-06, |
| "loss": 0.2422, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.21534238218697574, |
| "grad_norm": 0.27306080649708614, |
| "learning_rate": 9.998369180404283e-06, |
| "loss": 0.2671, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.21595241726399267, |
| "grad_norm": 0.357489524633953, |
| "learning_rate": 9.998230455670978e-06, |
| "loss": 0.2522, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.2165624523410096, |
| "grad_norm": 0.2697247457552434, |
| "learning_rate": 9.998086070066459e-06, |
| "loss": 0.2809, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.21717248741802653, |
| "grad_norm": 0.2620363759776813, |
| "learning_rate": 9.997936023754258e-06, |
| "loss": 0.26, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.21778252249504346, |
| "grad_norm": 0.25870971628871203, |
| "learning_rate": 9.99778031690431e-06, |
| "loss": 0.2767, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.2183925575720604, |
| "grad_norm": 0.2312512617517111, |
| "learning_rate": 9.997618949692966e-06, |
| "loss": 0.2467, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.21900259264907732, |
| "grad_norm": 0.2731114305931224, |
| "learning_rate": 9.997451922302987e-06, |
| "loss": 0.2346, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.21961262772609424, |
| "grad_norm": 0.25657499501781883, |
| "learning_rate": 9.997279234923542e-06, |
| "loss": 0.2504, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.22022266280311117, |
| "grad_norm": 0.2510163321878893, |
| "learning_rate": 9.997100887750215e-06, |
| "loss": 0.2517, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.2208326978801281, |
| "grad_norm": 0.25272708866757615, |
| "learning_rate": 9.996916880984995e-06, |
| "loss": 0.2477, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.22144273295714503, |
| "grad_norm": 0.2584798059801417, |
| "learning_rate": 9.996727214836286e-06, |
| "loss": 0.2509, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.22205276803416196, |
| "grad_norm": 0.27124501953300556, |
| "learning_rate": 9.996531889518898e-06, |
| "loss": 0.2635, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.2226628031111789, |
| "grad_norm": 0.2807475690600024, |
| "learning_rate": 9.99633090525405e-06, |
| "loss": 0.2658, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.22327283818819582, |
| "grad_norm": 0.26904250476142233, |
| "learning_rate": 9.996124262269376e-06, |
| "loss": 0.2446, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.22388287326521275, |
| "grad_norm": 0.2680294619993861, |
| "learning_rate": 9.99591196079891e-06, |
| "loss": 0.2595, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.22449290834222968, |
| "grad_norm": 0.285142520335724, |
| "learning_rate": 9.995694001083103e-06, |
| "loss": 0.2638, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.2251029434192466, |
| "grad_norm": 0.27597769384607446, |
| "learning_rate": 9.995470383368808e-06, |
| "loss": 0.2659, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.22571297849626354, |
| "grad_norm": 0.30424817017406613, |
| "learning_rate": 9.99524110790929e-06, |
| "loss": 0.2714, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.22632301357328047, |
| "grad_norm": 0.25721127229254476, |
| "learning_rate": 9.99500617496422e-06, |
| "loss": 0.2611, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.2269330486502974, |
| "grad_norm": 0.2751301645393888, |
| "learning_rate": 9.994765584799679e-06, |
| "loss": 0.2618, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.22754308372731433, |
| "grad_norm": 0.2692736088441834, |
| "learning_rate": 9.994519337688152e-06, |
| "loss": 0.2492, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.22815311880433126, |
| "grad_norm": 0.25304783010432796, |
| "learning_rate": 9.994267433908533e-06, |
| "loss": 0.2527, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.22876315388134819, |
| "grad_norm": 0.2863607765873851, |
| "learning_rate": 9.994009873746121e-06, |
| "loss": 0.2608, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.22937318895836512, |
| "grad_norm": 0.26194638397821157, |
| "learning_rate": 9.993746657492622e-06, |
| "loss": 0.2673, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.22998322403538204, |
| "grad_norm": 0.26236574139222973, |
| "learning_rate": 9.993477785446151e-06, |
| "loss": 0.2449, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.23059325911239897, |
| "grad_norm": 0.2750923719520727, |
| "learning_rate": 9.993203257911222e-06, |
| "loss": 0.2683, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.2312032941894159, |
| "grad_norm": 0.28560193546700424, |
| "learning_rate": 9.99292307519876e-06, |
| "loss": 0.254, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.23181332926643283, |
| "grad_norm": 0.3132035003182686, |
| "learning_rate": 9.992637237626092e-06, |
| "loss": 0.2523, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.23242336434344976, |
| "grad_norm": 0.2825046469057102, |
| "learning_rate": 9.992345745516954e-06, |
| "loss": 0.2815, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.23303339942046666, |
| "grad_norm": 0.28466890282763896, |
| "learning_rate": 9.992048599201478e-06, |
| "loss": 0.2652, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.2336434344974836, |
| "grad_norm": 0.26975445758231287, |
| "learning_rate": 9.991745799016206e-06, |
| "loss": 0.2612, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.23425346957450052, |
| "grad_norm": 0.23586273367056518, |
| "learning_rate": 9.991437345304084e-06, |
| "loss": 0.2596, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.23486350465151745, |
| "grad_norm": 0.30573359370561365, |
| "learning_rate": 9.991123238414455e-06, |
| "loss": 0.2524, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.23547353972853438, |
| "grad_norm": 0.2630216344835691, |
| "learning_rate": 9.990803478703073e-06, |
| "loss": 0.2446, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.2360835748055513, |
| "grad_norm": 0.25756493585689366, |
| "learning_rate": 9.990478066532088e-06, |
| "loss": 0.2303, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.23669360988256824, |
| "grad_norm": 0.2694584250938353, |
| "learning_rate": 9.990147002270051e-06, |
| "loss": 0.2526, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.23730364495958517, |
| "grad_norm": 0.28404877974160536, |
| "learning_rate": 9.989810286291923e-06, |
| "loss": 0.2578, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.2379136800366021, |
| "grad_norm": 0.24306934901870883, |
| "learning_rate": 9.989467918979055e-06, |
| "loss": 0.2508, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.23852371511361903, |
| "grad_norm": 0.27162306691858007, |
| "learning_rate": 9.989119900719206e-06, |
| "loss": 0.2464, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.23913375019063596, |
| "grad_norm": 0.267591647087372, |
| "learning_rate": 9.988766231906532e-06, |
| "loss": 0.2669, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.2397437852676529, |
| "grad_norm": 0.250629654500331, |
| "learning_rate": 9.988406912941591e-06, |
| "loss": 0.2599, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.24035382034466982, |
| "grad_norm": 0.27800678255553674, |
| "learning_rate": 9.988041944231335e-06, |
| "loss": 0.266, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.24096385542168675, |
| "grad_norm": 0.2646178524474114, |
| "learning_rate": 9.987671326189123e-06, |
| "loss": 0.2297, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.24157389049870368, |
| "grad_norm": 0.2618422966309834, |
| "learning_rate": 9.987295059234704e-06, |
| "loss": 0.2593, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.2421839255757206, |
| "grad_norm": 0.26039061767925137, |
| "learning_rate": 9.986913143794232e-06, |
| "loss": 0.2594, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.24279396065273753, |
| "grad_norm": 0.43661380805158756, |
| "learning_rate": 9.986525580300253e-06, |
| "loss": 0.2553, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.24340399572975446, |
| "grad_norm": 0.2521963798608539, |
| "learning_rate": 9.986132369191712e-06, |
| "loss": 0.2386, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.2440140308067714, |
| "grad_norm": 0.2522149023961132, |
| "learning_rate": 9.98573351091395e-06, |
| "loss": 0.2489, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.24462406588378832, |
| "grad_norm": 0.24778130062040454, |
| "learning_rate": 9.985329005918702e-06, |
| "loss": 0.2291, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.24523410096080525, |
| "grad_norm": 0.2569038342607788, |
| "learning_rate": 9.984918854664105e-06, |
| "loss": 0.2612, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.24584413603782218, |
| "grad_norm": 0.25041191695544, |
| "learning_rate": 9.984503057614684e-06, |
| "loss": 0.2576, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.2464541711148391, |
| "grad_norm": 0.24717652132981327, |
| "learning_rate": 9.984081615241356e-06, |
| "loss": 0.2495, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.24706420619185604, |
| "grad_norm": 0.26673537646912776, |
| "learning_rate": 9.983654528021442e-06, |
| "loss": 0.2689, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.24767424126887297, |
| "grad_norm": 0.2465342997361249, |
| "learning_rate": 9.98322179643865e-06, |
| "loss": 0.2616, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.2482842763458899, |
| "grad_norm": 0.28245378040509006, |
| "learning_rate": 9.982783420983075e-06, |
| "loss": 0.2511, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.24889431142290683, |
| "grad_norm": 0.26880312709115456, |
| "learning_rate": 9.982339402151217e-06, |
| "loss": 0.2327, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.24950434649992376, |
| "grad_norm": 0.254261386844422, |
| "learning_rate": 9.981889740445958e-06, |
| "loss": 0.2388, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.25011438157694066, |
| "grad_norm": 0.26583061665924723, |
| "learning_rate": 9.981434436376572e-06, |
| "loss": 0.2735, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.2507244166539576, |
| "grad_norm": 0.2537203710329665, |
| "learning_rate": 9.980973490458728e-06, |
| "loss": 0.2802, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.2513344517309745, |
| "grad_norm": 0.2669060501581036, |
| "learning_rate": 9.980506903214481e-06, |
| "loss": 0.2564, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.2519444868079915, |
| "grad_norm": 0.2559541236925637, |
| "learning_rate": 9.980034675172274e-06, |
| "loss": 0.2432, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.2525545218850084, |
| "grad_norm": 0.2558690804133373, |
| "learning_rate": 9.979556806866943e-06, |
| "loss": 0.2535, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.25316455696202533, |
| "grad_norm": 0.25826409987008647, |
| "learning_rate": 9.97907329883971e-06, |
| "loss": 0.2762, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.25377459203904223, |
| "grad_norm": 0.26642742809887227, |
| "learning_rate": 9.978584151638182e-06, |
| "loss": 0.2697, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.2543846271160592, |
| "grad_norm": 0.23619289173506056, |
| "learning_rate": 9.978089365816357e-06, |
| "loss": 0.24, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.2549946621930761, |
| "grad_norm": 0.2540500448608682, |
| "learning_rate": 9.977588941934615e-06, |
| "loss": 0.2762, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.25560469727009305, |
| "grad_norm": 0.2565053314392929, |
| "learning_rate": 9.977082880559725e-06, |
| "loss": 0.27, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.25621473234710995, |
| "grad_norm": 0.24569647149613266, |
| "learning_rate": 9.97657118226484e-06, |
| "loss": 0.2471, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.2568247674241269, |
| "grad_norm": 0.24132230841255492, |
| "learning_rate": 9.976053847629496e-06, |
| "loss": 0.2463, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.2574348025011438, |
| "grad_norm": 0.23527712284133528, |
| "learning_rate": 9.975530877239613e-06, |
| "loss": 0.2345, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.25804483757816077, |
| "grad_norm": 0.2769563848341972, |
| "learning_rate": 9.975002271687496e-06, |
| "loss": 0.2664, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.25865487265517767, |
| "grad_norm": 0.3293393338967704, |
| "learning_rate": 9.974468031571825e-06, |
| "loss": 0.2674, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.2592649077321946, |
| "grad_norm": 0.2602552909440054, |
| "learning_rate": 9.973928157497675e-06, |
| "loss": 0.2832, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.25987494280921153, |
| "grad_norm": 0.2580342895295799, |
| "learning_rate": 9.973382650076488e-06, |
| "loss": 0.2629, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.26048497788622843, |
| "grad_norm": 0.26747490402513346, |
| "learning_rate": 9.972831509926094e-06, |
| "loss": 0.2719, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.2610950129632454, |
| "grad_norm": 0.23993540925898904, |
| "learning_rate": 9.972274737670702e-06, |
| "loss": 0.2363, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.2617050480402623, |
| "grad_norm": 0.30395384010333787, |
| "learning_rate": 9.971712333940896e-06, |
| "loss": 0.271, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.26231508311727925, |
| "grad_norm": 0.2584379936178291, |
| "learning_rate": 9.971144299373643e-06, |
| "loss": 0.2482, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.26292511819429615, |
| "grad_norm": 0.24603617492477198, |
| "learning_rate": 9.970570634612282e-06, |
| "loss": 0.2542, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.2635351532713131, |
| "grad_norm": 0.2505894181965195, |
| "learning_rate": 9.969991340306533e-06, |
| "loss": 0.2404, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.26414518834833, |
| "grad_norm": 0.24674783311739618, |
| "learning_rate": 9.969406417112489e-06, |
| "loss": 0.2363, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.26475522342534696, |
| "grad_norm": 0.35440716800891336, |
| "learning_rate": 9.968815865692622e-06, |
| "loss": 0.2475, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.26536525850236387, |
| "grad_norm": 0.2744766335288482, |
| "learning_rate": 9.968219686715773e-06, |
| "loss": 0.2581, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.2659752935793808, |
| "grad_norm": 0.2674838161349361, |
| "learning_rate": 9.96761788085716e-06, |
| "loss": 0.2638, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.2665853286563977, |
| "grad_norm": 0.2520490283184246, |
| "learning_rate": 9.967010448798376e-06, |
| "loss": 0.2529, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.2671953637334147, |
| "grad_norm": 0.31522100232197137, |
| "learning_rate": 9.96639739122738e-06, |
| "loss": 0.2696, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.2678053988104316, |
| "grad_norm": 0.26977483001310687, |
| "learning_rate": 9.965778708838509e-06, |
| "loss": 0.2372, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.26841543388744854, |
| "grad_norm": 0.24287141515808836, |
| "learning_rate": 9.965154402332464e-06, |
| "loss": 0.2399, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.26902546896446544, |
| "grad_norm": 0.2739660973493756, |
| "learning_rate": 9.964524472416319e-06, |
| "loss": 0.2529, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.2696355040414824, |
| "grad_norm": 0.26189624333130374, |
| "learning_rate": 9.96388891980352e-06, |
| "loss": 0.2549, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.2702455391184993, |
| "grad_norm": 0.26116140101705765, |
| "learning_rate": 9.963247745213876e-06, |
| "loss": 0.2613, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.27085557419551626, |
| "grad_norm": 0.2698487180887434, |
| "learning_rate": 9.962600949373567e-06, |
| "loss": 0.2635, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.27146560927253316, |
| "grad_norm": 0.24553015492634403, |
| "learning_rate": 9.961948533015135e-06, |
| "loss": 0.2442, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.2720756443495501, |
| "grad_norm": 0.23926699877002677, |
| "learning_rate": 9.961290496877492e-06, |
| "loss": 0.2598, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.272685679426567, |
| "grad_norm": 0.2765901598887178, |
| "learning_rate": 9.960626841705913e-06, |
| "loss": 0.2768, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.273295714503584, |
| "grad_norm": 0.24629883798169278, |
| "learning_rate": 9.95995756825204e-06, |
| "loss": 0.2649, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.2739057495806009, |
| "grad_norm": 0.26061938760384895, |
| "learning_rate": 9.959282677273869e-06, |
| "loss": 0.2599, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.27451578465761783, |
| "grad_norm": 0.25357892001252424, |
| "learning_rate": 9.95860216953577e-06, |
| "loss": 0.2277, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.27512581973463474, |
| "grad_norm": 0.24102293841362316, |
| "learning_rate": 9.95791604580847e-06, |
| "loss": 0.2368, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.2757358548116517, |
| "grad_norm": 0.2656199764240277, |
| "learning_rate": 9.957224306869053e-06, |
| "loss": 0.2403, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.2763458898886686, |
| "grad_norm": 0.26931339295947415, |
| "learning_rate": 9.956526953500965e-06, |
| "loss": 0.2528, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.27695592496568555, |
| "grad_norm": 0.25037961889937077, |
| "learning_rate": 9.955823986494012e-06, |
| "loss": 0.2625, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.27756596004270245, |
| "grad_norm": 0.25439066559296586, |
| "learning_rate": 9.955115406644357e-06, |
| "loss": 0.2665, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.2781759951197194, |
| "grad_norm": 0.25659990784327696, |
| "learning_rate": 9.95440121475452e-06, |
| "loss": 0.2415, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.2787860301967363, |
| "grad_norm": 0.24662984651313433, |
| "learning_rate": 9.953681411633376e-06, |
| "loss": 0.2545, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.2793960652737532, |
| "grad_norm": 0.2558250167454128, |
| "learning_rate": 9.952955998096155e-06, |
| "loss": 0.2488, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.28000610035077017, |
| "grad_norm": 0.23806375709432726, |
| "learning_rate": 9.952224974964446e-06, |
| "loss": 0.2284, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.2806161354277871, |
| "grad_norm": 0.24600626960166394, |
| "learning_rate": 9.951488343066184e-06, |
| "loss": 0.2335, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.28122617050480403, |
| "grad_norm": 0.2811619586861078, |
| "learning_rate": 9.950746103235663e-06, |
| "loss": 0.2628, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.28183620558182093, |
| "grad_norm": 0.24366421930517498, |
| "learning_rate": 9.949998256313523e-06, |
| "loss": 0.2796, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.2824462406588379, |
| "grad_norm": 0.25171729952336713, |
| "learning_rate": 9.949244803146757e-06, |
| "loss": 0.2493, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.2830562757358548, |
| "grad_norm": 0.25433563615871724, |
| "learning_rate": 9.948485744588709e-06, |
| "loss": 0.2588, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.28366631081287175, |
| "grad_norm": 0.2282071902405683, |
| "learning_rate": 9.947721081499068e-06, |
| "loss": 0.2464, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.28427634588988865, |
| "grad_norm": 0.24596986616467853, |
| "learning_rate": 9.946950814743871e-06, |
| "loss": 0.244, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.2848863809669056, |
| "grad_norm": 0.25314080481909895, |
| "learning_rate": 9.946174945195508e-06, |
| "loss": 0.2368, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.2854964160439225, |
| "grad_norm": 0.2480664113962836, |
| "learning_rate": 9.945393473732706e-06, |
| "loss": 0.2467, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.28610645112093946, |
| "grad_norm": 0.24597627536896932, |
| "learning_rate": 9.944606401240538e-06, |
| "loss": 0.2499, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.28671648619795637, |
| "grad_norm": 0.26505498442178216, |
| "learning_rate": 9.943813728610428e-06, |
| "loss": 0.253, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.2873265212749733, |
| "grad_norm": 0.2590414417352618, |
| "learning_rate": 9.943015456740132e-06, |
| "loss": 0.2534, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.2879365563519902, |
| "grad_norm": 0.25994353284833804, |
| "learning_rate": 9.942211586533756e-06, |
| "loss": 0.269, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.2885465914290072, |
| "grad_norm": 0.2757214458321111, |
| "learning_rate": 9.941402118901743e-06, |
| "loss": 0.2538, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.2891566265060241, |
| "grad_norm": 0.22785777363847964, |
| "learning_rate": 9.940587054760875e-06, |
| "loss": 0.2449, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.28976666158304104, |
| "grad_norm": 0.2977878583545315, |
| "learning_rate": 9.939766395034275e-06, |
| "loss": 0.2436, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.29037669666005794, |
| "grad_norm": 0.25374903168798246, |
| "learning_rate": 9.938940140651398e-06, |
| "loss": 0.2551, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.2909867317370749, |
| "grad_norm": 0.26923026331434907, |
| "learning_rate": 9.938108292548044e-06, |
| "loss": 0.2356, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.2915967668140918, |
| "grad_norm": 0.2750449727972376, |
| "learning_rate": 9.937270851666341e-06, |
| "loss": 0.2638, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.29220680189110876, |
| "grad_norm": 0.2533222033571772, |
| "learning_rate": 9.936427818954753e-06, |
| "loss": 0.2432, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.29281683696812566, |
| "grad_norm": 0.2506513318638803, |
| "learning_rate": 9.935579195368078e-06, |
| "loss": 0.2677, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.2934268720451426, |
| "grad_norm": 0.2631288863531059, |
| "learning_rate": 9.934724981867447e-06, |
| "loss": 0.2528, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.2940369071221595, |
| "grad_norm": 0.23937145957999464, |
| "learning_rate": 9.93386517942032e-06, |
| "loss": 0.2478, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.2946469421991765, |
| "grad_norm": 0.25924435777122534, |
| "learning_rate": 9.93299978900049e-06, |
| "loss": 0.2504, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.2952569772761934, |
| "grad_norm": 0.26376643896473395, |
| "learning_rate": 9.932128811588074e-06, |
| "loss": 0.2467, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.29586701235321033, |
| "grad_norm": 0.27081772073605664, |
| "learning_rate": 9.931252248169518e-06, |
| "loss": 0.2649, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.29647704743022724, |
| "grad_norm": 0.2572481234239358, |
| "learning_rate": 9.9303700997376e-06, |
| "loss": 0.237, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.2970870825072442, |
| "grad_norm": 0.263952425140558, |
| "learning_rate": 9.929482367291417e-06, |
| "loss": 0.2566, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.2976971175842611, |
| "grad_norm": 0.25097369019657156, |
| "learning_rate": 9.928589051836392e-06, |
| "loss": 0.2513, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.298307152661278, |
| "grad_norm": 0.25162784794934706, |
| "learning_rate": 9.927690154384273e-06, |
| "loss": 0.2493, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.29891718773829495, |
| "grad_norm": 0.29235771663739313, |
| "learning_rate": 9.92678567595313e-06, |
| "loss": 0.2734, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.29952722281531186, |
| "grad_norm": 0.24856494040312965, |
| "learning_rate": 9.92587561756735e-06, |
| "loss": 0.2632, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.3001372578923288, |
| "grad_norm": 0.25410549867586324, |
| "learning_rate": 9.924959980257645e-06, |
| "loss": 0.2384, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.3007472929693457, |
| "grad_norm": 0.2532511684622894, |
| "learning_rate": 9.924038765061042e-06, |
| "loss": 0.2358, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.30135732804636267, |
| "grad_norm": 0.24565076144059494, |
| "learning_rate": 9.923111973020885e-06, |
| "loss": 0.257, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.3019673631233796, |
| "grad_norm": 0.2583658220557501, |
| "learning_rate": 9.922179605186837e-06, |
| "loss": 0.2553, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.30257739820039653, |
| "grad_norm": 0.2482486410141977, |
| "learning_rate": 9.921241662614874e-06, |
| "loss": 0.2399, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.30318743327741343, |
| "grad_norm": 0.25631202391417307, |
| "learning_rate": 9.920298146367287e-06, |
| "loss": 0.2649, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.3037974683544304, |
| "grad_norm": 0.24307313332469035, |
| "learning_rate": 9.919349057512679e-06, |
| "loss": 0.242, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.3044075034314473, |
| "grad_norm": 0.22637978283295307, |
| "learning_rate": 9.918394397125963e-06, |
| "loss": 0.2278, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.30501753850846425, |
| "grad_norm": 0.25163879424818747, |
| "learning_rate": 9.917434166288364e-06, |
| "loss": 0.2265, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.30501753850846425, |
| "eval_loss": 0.2490791380405426, |
| "eval_runtime": 260.635, |
| "eval_samples_per_second": 4.105, |
| "eval_steps_per_second": 0.13, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.30562757358548115, |
| "grad_norm": 0.24358156361203398, |
| "learning_rate": 9.916468366087418e-06, |
| "loss": 0.2425, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.3062376086624981, |
| "grad_norm": 0.33977096706149845, |
| "learning_rate": 9.915496997616963e-06, |
| "loss": 0.2468, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.306847643739515, |
| "grad_norm": 0.23025095643742358, |
| "learning_rate": 9.91452006197715e-06, |
| "loss": 0.2266, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.30745767881653197, |
| "grad_norm": 0.2600726676831414, |
| "learning_rate": 9.913537560274426e-06, |
| "loss": 0.2571, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.30806771389354887, |
| "grad_norm": 0.2228281275268031, |
| "learning_rate": 9.912549493621555e-06, |
| "loss": 0.2368, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.3086777489705658, |
| "grad_norm": 0.24438283754941054, |
| "learning_rate": 9.911555863137593e-06, |
| "loss": 0.2625, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.3092877840475827, |
| "grad_norm": 0.23383710835984622, |
| "learning_rate": 9.910556669947902e-06, |
| "loss": 0.2471, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.3098978191245997, |
| "grad_norm": 0.24743991801197227, |
| "learning_rate": 9.909551915184144e-06, |
| "loss": 0.2635, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.3105078542016166, |
| "grad_norm": 0.2794396361771197, |
| "learning_rate": 9.908541599984276e-06, |
| "loss": 0.276, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.31111788927863354, |
| "grad_norm": 0.2814300827849775, |
| "learning_rate": 9.907525725492559e-06, |
| "loss": 0.2646, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.31172792435565044, |
| "grad_norm": 0.24066164246061003, |
| "learning_rate": 9.906504292859544e-06, |
| "loss": 0.2815, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.3123379594326674, |
| "grad_norm": 0.2720619923340415, |
| "learning_rate": 9.905477303242085e-06, |
| "loss": 0.2475, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.3129479945096843, |
| "grad_norm": 0.266396569737668, |
| "learning_rate": 9.904444757803322e-06, |
| "loss": 0.2143, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.31355802958670126, |
| "grad_norm": 0.26843992179019743, |
| "learning_rate": 9.903406657712688e-06, |
| "loss": 0.258, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.31416806466371816, |
| "grad_norm": 0.24052398634290234, |
| "learning_rate": 9.902363004145914e-06, |
| "loss": 0.2387, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.3147780997407351, |
| "grad_norm": 0.2523106598314947, |
| "learning_rate": 9.901313798285012e-06, |
| "loss": 0.2466, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.315388134817752, |
| "grad_norm": 0.22329761292848602, |
| "learning_rate": 9.90025904131829e-06, |
| "loss": 0.2187, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.315998169894769, |
| "grad_norm": 0.22947361076735034, |
| "learning_rate": 9.899198734440335e-06, |
| "loss": 0.2544, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.3166082049717859, |
| "grad_norm": 0.2645678387368134, |
| "learning_rate": 9.898132878852026e-06, |
| "loss": 0.2703, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.3172182400488028, |
| "grad_norm": 0.22881394529975585, |
| "learning_rate": 9.897061475760528e-06, |
| "loss": 0.2188, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.31782827512581974, |
| "grad_norm": 0.23413558660635153, |
| "learning_rate": 9.895984526379282e-06, |
| "loss": 0.2121, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.31843831020283664, |
| "grad_norm": 0.24515654764657868, |
| "learning_rate": 9.894902031928014e-06, |
| "loss": 0.2582, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.3190483452798536, |
| "grad_norm": 0.24783751437653048, |
| "learning_rate": 9.89381399363273e-06, |
| "loss": 0.2454, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.3196583803568705, |
| "grad_norm": 0.25944084766719894, |
| "learning_rate": 9.892720412725717e-06, |
| "loss": 0.2339, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.32026841543388745, |
| "grad_norm": 0.24221200657802805, |
| "learning_rate": 9.891621290445534e-06, |
| "loss": 0.2415, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.32087845051090436, |
| "grad_norm": 0.26721221201594864, |
| "learning_rate": 9.890516628037024e-06, |
| "loss": 0.2467, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.3214884855879213, |
| "grad_norm": 0.2661462723846872, |
| "learning_rate": 9.889406426751296e-06, |
| "loss": 0.2544, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.3220985206649382, |
| "grad_norm": 0.25737118244462615, |
| "learning_rate": 9.88829068784574e-06, |
| "loss": 0.2519, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.32270855574195517, |
| "grad_norm": 0.2566760568897278, |
| "learning_rate": 9.887169412584012e-06, |
| "loss": 0.248, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.3233185908189721, |
| "grad_norm": 0.27835987527905887, |
| "learning_rate": 9.88604260223604e-06, |
| "loss": 0.2469, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.32392862589598903, |
| "grad_norm": 0.2655282154010336, |
| "learning_rate": 9.884910258078022e-06, |
| "loss": 0.2599, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.32453866097300593, |
| "grad_norm": 0.2667148794264865, |
| "learning_rate": 9.883772381392423e-06, |
| "loss": 0.2412, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.3251486960500229, |
| "grad_norm": 0.2623581156385756, |
| "learning_rate": 9.882628973467972e-06, |
| "loss": 0.2618, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.3257587311270398, |
| "grad_norm": 0.24318620380902956, |
| "learning_rate": 9.881480035599667e-06, |
| "loss": 0.2376, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.32636876620405675, |
| "grad_norm": 0.26633961251095956, |
| "learning_rate": 9.880325569088765e-06, |
| "loss": 0.2503, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.32697880128107365, |
| "grad_norm": 0.2630382405080692, |
| "learning_rate": 9.879165575242788e-06, |
| "loss": 0.2567, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.3275888363580906, |
| "grad_norm": 0.2541644574129942, |
| "learning_rate": 9.878000055375512e-06, |
| "loss": 0.2474, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.3281988714351075, |
| "grad_norm": 0.30155457807091895, |
| "learning_rate": 9.87682901080698e-06, |
| "loss": 0.2464, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.32880890651212447, |
| "grad_norm": 0.2660733739753395, |
| "learning_rate": 9.875652442863483e-06, |
| "loss": 0.2494, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.32941894158914137, |
| "grad_norm": 0.24132541271471367, |
| "learning_rate": 9.874470352877576e-06, |
| "loss": 0.2653, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.3300289766661583, |
| "grad_norm": 0.2315138925207163, |
| "learning_rate": 9.873282742188066e-06, |
| "loss": 0.2417, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.3306390117431752, |
| "grad_norm": 0.31417481601320424, |
| "learning_rate": 9.87208961214001e-06, |
| "loss": 0.2766, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.3312490468201922, |
| "grad_norm": 0.24645469040878668, |
| "learning_rate": 9.870890964084713e-06, |
| "loss": 0.2439, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.3318590818972091, |
| "grad_norm": 0.2363463068965621, |
| "learning_rate": 9.86968679937974e-06, |
| "loss": 0.2534, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.33246911697422604, |
| "grad_norm": 0.2692926962955553, |
| "learning_rate": 9.868477119388897e-06, |
| "loss": 0.2481, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.33307915205124294, |
| "grad_norm": 0.2470726477470715, |
| "learning_rate": 9.867261925482233e-06, |
| "loss": 0.2376, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.3336891871282599, |
| "grad_norm": 0.26915801212713547, |
| "learning_rate": 9.866041219036051e-06, |
| "loss": 0.2567, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.3342992222052768, |
| "grad_norm": 0.23819617510254015, |
| "learning_rate": 9.86481500143289e-06, |
| "loss": 0.2287, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.3349092572822937, |
| "grad_norm": 0.23372133613640378, |
| "learning_rate": 9.863583274061535e-06, |
| "loss": 0.2132, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.33551929235931066, |
| "grad_norm": 0.2560238801091462, |
| "learning_rate": 9.862346038317009e-06, |
| "loss": 0.2379, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.33612932743632756, |
| "grad_norm": 0.2577909389450448, |
| "learning_rate": 9.861103295600574e-06, |
| "loss": 0.2468, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.3367393625133445, |
| "grad_norm": 0.36804965214464147, |
| "learning_rate": 9.859855047319732e-06, |
| "loss": 0.2553, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.3373493975903614, |
| "grad_norm": 0.25636657805348956, |
| "learning_rate": 9.858601294888212e-06, |
| "loss": 0.2595, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.3379594326673784, |
| "grad_norm": 0.25072273193978134, |
| "learning_rate": 9.85734203972599e-06, |
| "loss": 0.2472, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.3385694677443953, |
| "grad_norm": 0.2721479101551272, |
| "learning_rate": 9.856077283259262e-06, |
| "loss": 0.2394, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.33917950282141224, |
| "grad_norm": 0.26469111104432536, |
| "learning_rate": 9.85480702692046e-06, |
| "loss": 0.2499, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.33978953789842914, |
| "grad_norm": 0.24605892848863048, |
| "learning_rate": 9.853531272148248e-06, |
| "loss": 0.2402, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.3403995729754461, |
| "grad_norm": 0.2470370356204931, |
| "learning_rate": 9.852250020387513e-06, |
| "loss": 0.2396, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.341009608052463, |
| "grad_norm": 0.27967529556696236, |
| "learning_rate": 9.850963273089365e-06, |
| "loss": 0.2592, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.34161964312947996, |
| "grad_norm": 0.2550975214568574, |
| "learning_rate": 9.849671031711146e-06, |
| "loss": 0.2357, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.34222967820649686, |
| "grad_norm": 0.23782560164722819, |
| "learning_rate": 9.848373297716414e-06, |
| "loss": 0.2302, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.3428397132835138, |
| "grad_norm": 0.26172256616517786, |
| "learning_rate": 9.847070072574952e-06, |
| "loss": 0.261, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.3434497483605307, |
| "grad_norm": 0.25376783721614055, |
| "learning_rate": 9.84576135776276e-06, |
| "loss": 0.2525, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.3440597834375477, |
| "grad_norm": 0.25534231185707695, |
| "learning_rate": 9.844447154762054e-06, |
| "loss": 0.2581, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.3446698185145646, |
| "grad_norm": 0.2514937599287347, |
| "learning_rate": 9.84312746506127e-06, |
| "loss": 0.2408, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.34527985359158153, |
| "grad_norm": 0.23793239330047294, |
| "learning_rate": 9.841802290155054e-06, |
| "loss": 0.2457, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.34588988866859843, |
| "grad_norm": 0.2498278422776726, |
| "learning_rate": 9.840471631544266e-06, |
| "loss": 0.2472, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.3464999237456154, |
| "grad_norm": 0.25133312384225875, |
| "learning_rate": 9.839135490735978e-06, |
| "loss": 0.2525, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.3471099588226323, |
| "grad_norm": 0.23975828367424365, |
| "learning_rate": 9.837793869243468e-06, |
| "loss": 0.2483, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.34771999389964925, |
| "grad_norm": 0.23902175350837487, |
| "learning_rate": 9.836446768586225e-06, |
| "loss": 0.2414, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.34833002897666615, |
| "grad_norm": 0.23786061397646546, |
| "learning_rate": 9.835094190289941e-06, |
| "loss": 0.2419, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.3489400640536831, |
| "grad_norm": 0.23971167681501773, |
| "learning_rate": 9.833736135886513e-06, |
| "loss": 0.2432, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.3495500991307, |
| "grad_norm": 0.2469864119024162, |
| "learning_rate": 9.832372606914038e-06, |
| "loss": 0.2495, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.35016013420771697, |
| "grad_norm": 0.2516517284719589, |
| "learning_rate": 9.831003604916815e-06, |
| "loss": 0.2301, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.35077016928473387, |
| "grad_norm": 0.2604419831002641, |
| "learning_rate": 9.829629131445342e-06, |
| "loss": 0.2583, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.3513802043617508, |
| "grad_norm": 0.24036837488264234, |
| "learning_rate": 9.828249188056314e-06, |
| "loss": 0.2423, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.3519902394387677, |
| "grad_norm": 0.2693073521625091, |
| "learning_rate": 9.826863776312621e-06, |
| "loss": 0.2515, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.3526002745157847, |
| "grad_norm": 0.24574119339988224, |
| "learning_rate": 9.825472897783344e-06, |
| "loss": 0.2328, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.3532103095928016, |
| "grad_norm": 0.2422586260824654, |
| "learning_rate": 9.824076554043759e-06, |
| "loss": 0.23, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.3538203446698185, |
| "grad_norm": 0.24616937036065017, |
| "learning_rate": 9.822674746675329e-06, |
| "loss": 0.216, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.35443037974683544, |
| "grad_norm": 0.2619722786538483, |
| "learning_rate": 9.821267477265705e-06, |
| "loss": 0.2529, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.35504041482385235, |
| "grad_norm": 0.23474122964467006, |
| "learning_rate": 9.819854747408728e-06, |
| "loss": 0.2459, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.3556504499008693, |
| "grad_norm": 0.25658578266252213, |
| "learning_rate": 9.81843655870442e-06, |
| "loss": 0.2407, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.3562604849778862, |
| "grad_norm": 0.2543668048771176, |
| "learning_rate": 9.817012912758986e-06, |
| "loss": 0.2607, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.35687052005490316, |
| "grad_norm": 0.24916680040867104, |
| "learning_rate": 9.815583811184809e-06, |
| "loss": 0.2371, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.35748055513192006, |
| "grad_norm": 0.24253556661774894, |
| "learning_rate": 9.814149255600458e-06, |
| "loss": 0.2505, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.358090590208937, |
| "grad_norm": 0.25098425841167166, |
| "learning_rate": 9.812709247630671e-06, |
| "loss": 0.2719, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.3587006252859539, |
| "grad_norm": 0.2410722695307963, |
| "learning_rate": 9.81126378890637e-06, |
| "loss": 0.2537, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.3593106603629709, |
| "grad_norm": 0.23987057965373262, |
| "learning_rate": 9.80981288106464e-06, |
| "loss": 0.2652, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.3599206954399878, |
| "grad_norm": 0.24963404051174887, |
| "learning_rate": 9.808356525748748e-06, |
| "loss": 0.236, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.36053073051700474, |
| "grad_norm": 0.2484565381470736, |
| "learning_rate": 9.806894724608122e-06, |
| "loss": 0.2628, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.36114076559402164, |
| "grad_norm": 0.23634017497508783, |
| "learning_rate": 9.805427479298365e-06, |
| "loss": 0.2463, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.3617508006710386, |
| "grad_norm": 0.2790490778566573, |
| "learning_rate": 9.803954791481239e-06, |
| "loss": 0.2687, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.3623608357480555, |
| "grad_norm": 0.25749534513801714, |
| "learning_rate": 9.802476662824676e-06, |
| "loss": 0.2622, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.36297087082507246, |
| "grad_norm": 0.24333551374135254, |
| "learning_rate": 9.800993095002767e-06, |
| "loss": 0.247, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.36358090590208936, |
| "grad_norm": 0.24467156233879625, |
| "learning_rate": 9.799504089695762e-06, |
| "loss": 0.2414, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.3641909409791063, |
| "grad_norm": 0.24529249172357293, |
| "learning_rate": 9.798009648590073e-06, |
| "loss": 0.2644, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.3648009760561232, |
| "grad_norm": 0.281323360902479, |
| "learning_rate": 9.796509773378267e-06, |
| "loss": 0.2503, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.3654110111331402, |
| "grad_norm": 0.24713120414672382, |
| "learning_rate": 9.795004465759067e-06, |
| "loss": 0.2581, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.3660210462101571, |
| "grad_norm": 0.244640088774755, |
| "learning_rate": 9.793493727437343e-06, |
| "loss": 0.2415, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.36663108128717403, |
| "grad_norm": 0.24741333198569834, |
| "learning_rate": 9.79197756012412e-06, |
| "loss": 0.2553, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.36724111636419093, |
| "grad_norm": 0.2486624353021153, |
| "learning_rate": 9.790455965536574e-06, |
| "loss": 0.2425, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.3678511514412079, |
| "grad_norm": 0.25239998228300475, |
| "learning_rate": 9.788928945398025e-06, |
| "loss": 0.2555, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.3684611865182248, |
| "grad_norm": 0.2651978042658129, |
| "learning_rate": 9.787396501437934e-06, |
| "loss": 0.2413, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.36907122159524175, |
| "grad_norm": 0.2404173861055526, |
| "learning_rate": 9.785858635391913e-06, |
| "loss": 0.2475, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.36968125667225865, |
| "grad_norm": 0.23571440170965438, |
| "learning_rate": 9.78431534900171e-06, |
| "loss": 0.2533, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.3702912917492756, |
| "grad_norm": 0.24233704202409498, |
| "learning_rate": 9.782766644015212e-06, |
| "loss": 0.2397, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.3709013268262925, |
| "grad_norm": 0.2610196149576871, |
| "learning_rate": 9.781212522186442e-06, |
| "loss": 0.2421, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.37151136190330947, |
| "grad_norm": 0.23835119864458468, |
| "learning_rate": 9.779652985275562e-06, |
| "loss": 0.2245, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.37212139698032637, |
| "grad_norm": 0.23920196375213928, |
| "learning_rate": 9.778088035048866e-06, |
| "loss": 0.2632, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.37273143205734327, |
| "grad_norm": 0.2649601180748752, |
| "learning_rate": 9.776517673278772e-06, |
| "loss": 0.263, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.3733414671343602, |
| "grad_norm": 0.2542987280812582, |
| "learning_rate": 9.774941901743838e-06, |
| "loss": 0.2374, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.37395150221137713, |
| "grad_norm": 0.24882298018650365, |
| "learning_rate": 9.773360722228742e-06, |
| "loss": 0.2457, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.3745615372883941, |
| "grad_norm": 0.23412848045448256, |
| "learning_rate": 9.771774136524287e-06, |
| "loss": 0.2483, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.375171572365411, |
| "grad_norm": 0.25754931185275737, |
| "learning_rate": 9.770182146427403e-06, |
| "loss": 0.2501, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.37578160744242795, |
| "grad_norm": 0.24720643726302463, |
| "learning_rate": 9.768584753741134e-06, |
| "loss": 0.2336, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.37639164251944485, |
| "grad_norm": 0.23429526920676347, |
| "learning_rate": 9.766981960274653e-06, |
| "loss": 0.2523, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.3770016775964618, |
| "grad_norm": 0.23286582855824883, |
| "learning_rate": 9.76537376784324e-06, |
| "loss": 0.2507, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.3776117126734787, |
| "grad_norm": 0.24213790165188145, |
| "learning_rate": 9.763760178268296e-06, |
| "loss": 0.2422, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.37822174775049566, |
| "grad_norm": 0.23510019087838768, |
| "learning_rate": 9.762141193377329e-06, |
| "loss": 0.2207, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.37883178282751256, |
| "grad_norm": 0.2468787558422412, |
| "learning_rate": 9.760516815003965e-06, |
| "loss": 0.2523, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.3794418179045295, |
| "grad_norm": 0.3058221224160152, |
| "learning_rate": 9.758887044987929e-06, |
| "loss": 0.2443, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.3800518529815464, |
| "grad_norm": 0.26093269918959, |
| "learning_rate": 9.757251885175063e-06, |
| "loss": 0.2678, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.3806618880585634, |
| "grad_norm": 0.22969564204204285, |
| "learning_rate": 9.755611337417306e-06, |
| "loss": 0.2374, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.3812719231355803, |
| "grad_norm": 0.23089944539257504, |
| "learning_rate": 9.753965403572703e-06, |
| "loss": 0.231, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.38188195821259724, |
| "grad_norm": 0.2597564856286656, |
| "learning_rate": 9.752314085505396e-06, |
| "loss": 0.2576, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.38249199328961414, |
| "grad_norm": 0.2531822089084447, |
| "learning_rate": 9.750657385085627e-06, |
| "loss": 0.2298, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.3831020283666311, |
| "grad_norm": 0.2848691745062799, |
| "learning_rate": 9.748995304189734e-06, |
| "loss": 0.2518, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.383712063443648, |
| "grad_norm": 0.24751375845503512, |
| "learning_rate": 9.747327844700147e-06, |
| "loss": 0.242, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.38432209852066496, |
| "grad_norm": 0.2311387087802481, |
| "learning_rate": 9.745655008505392e-06, |
| "loss": 0.2535, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.38493213359768186, |
| "grad_norm": 0.2412556673680029, |
| "learning_rate": 9.74397679750008e-06, |
| "loss": 0.2412, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.3855421686746988, |
| "grad_norm": 0.24858957090381073, |
| "learning_rate": 9.74229321358491e-06, |
| "loss": 0.2623, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.3861522037517157, |
| "grad_norm": 0.233987379984257, |
| "learning_rate": 9.740604258666668e-06, |
| "loss": 0.2426, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.3867622388287327, |
| "grad_norm": 0.2529694629667342, |
| "learning_rate": 9.738909934658223e-06, |
| "loss": 0.251, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.3873722739057496, |
| "grad_norm": 0.2508951547032724, |
| "learning_rate": 9.737210243478522e-06, |
| "loss": 0.254, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.38798230898276653, |
| "grad_norm": 0.2356895929288683, |
| "learning_rate": 9.735505187052595e-06, |
| "loss": 0.2533, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.38859234405978343, |
| "grad_norm": 0.27045617478946093, |
| "learning_rate": 9.733794767311545e-06, |
| "loss": 0.242, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.3892023791368004, |
| "grad_norm": 0.2549042475520284, |
| "learning_rate": 9.732078986192552e-06, |
| "loss": 0.2471, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.3898124142138173, |
| "grad_norm": 0.23978833581681586, |
| "learning_rate": 9.730357845638866e-06, |
| "loss": 0.2437, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.39042244929083425, |
| "grad_norm": 0.2377076563680043, |
| "learning_rate": 9.72863134759981e-06, |
| "loss": 0.2443, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.39103248436785115, |
| "grad_norm": 0.24192790263242697, |
| "learning_rate": 9.726899494030768e-06, |
| "loss": 0.2199, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.39164251944486805, |
| "grad_norm": 0.2292328096544485, |
| "learning_rate": 9.725162286893197e-06, |
| "loss": 0.2402, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.392252554521885, |
| "grad_norm": 0.2339555266898248, |
| "learning_rate": 9.723419728154617e-06, |
| "loss": 0.2467, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.3928625895989019, |
| "grad_norm": 0.2542148373647154, |
| "learning_rate": 9.721671819788603e-06, |
| "loss": 0.2633, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.39347262467591887, |
| "grad_norm": 0.24151750283902107, |
| "learning_rate": 9.719918563774793e-06, |
| "loss": 0.2212, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.39408265975293577, |
| "grad_norm": 0.2430793021843514, |
| "learning_rate": 9.71815996209888e-06, |
| "loss": 0.2498, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.39469269482995273, |
| "grad_norm": 0.252445657472582, |
| "learning_rate": 9.716396016752616e-06, |
| "loss": 0.2512, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.39530272990696963, |
| "grad_norm": 0.2429734159537907, |
| "learning_rate": 9.7146267297338e-06, |
| "loss": 0.2601, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.3959127649839866, |
| "grad_norm": 0.2582242931327393, |
| "learning_rate": 9.712852103046281e-06, |
| "loss": 0.2587, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.3965228000610035, |
| "grad_norm": 0.26067550631747444, |
| "learning_rate": 9.711072138699957e-06, |
| "loss": 0.2501, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.39713283513802045, |
| "grad_norm": 0.24554660526811284, |
| "learning_rate": 9.709286838710774e-06, |
| "loss": 0.2731, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.39774287021503735, |
| "grad_norm": 0.23889263674607647, |
| "learning_rate": 9.707496205100714e-06, |
| "loss": 0.2485, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.3983529052920543, |
| "grad_norm": 0.25991253213394694, |
| "learning_rate": 9.705700239897809e-06, |
| "loss": 0.2524, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.3989629403690712, |
| "grad_norm": 0.2459360727540675, |
| "learning_rate": 9.70389894513612e-06, |
| "loss": 0.2523, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.39957297544608816, |
| "grad_norm": 0.25005925280691177, |
| "learning_rate": 9.70209232285575e-06, |
| "loss": 0.2494, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.40018301052310506, |
| "grad_norm": 0.2675096301368609, |
| "learning_rate": 9.700280375102835e-06, |
| "loss": 0.2611, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.400793045600122, |
| "grad_norm": 0.2406185003357182, |
| "learning_rate": 9.698463103929542e-06, |
| "loss": 0.2437, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.4014030806771389, |
| "grad_norm": 0.2641251121809679, |
| "learning_rate": 9.696640511394066e-06, |
| "loss": 0.2444, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.4020131157541559, |
| "grad_norm": 0.24262195457977112, |
| "learning_rate": 9.694812599560632e-06, |
| "loss": 0.2508, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.4026231508311728, |
| "grad_norm": 0.23038210084607058, |
| "learning_rate": 9.692979370499485e-06, |
| "loss": 0.2411, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.40323318590818974, |
| "grad_norm": 0.24981753963500378, |
| "learning_rate": 9.691140826286893e-06, |
| "loss": 0.2684, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.40384322098520664, |
| "grad_norm": 0.23408140036354103, |
| "learning_rate": 9.689296969005151e-06, |
| "loss": 0.2554, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.4044532560622236, |
| "grad_norm": 0.24536784626888322, |
| "learning_rate": 9.68744780074256e-06, |
| "loss": 0.247, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.4050632911392405, |
| "grad_norm": 0.24786305639021228, |
| "learning_rate": 9.685593323593445e-06, |
| "loss": 0.241, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.40567332621625746, |
| "grad_norm": 0.2437211055920838, |
| "learning_rate": 9.68373353965814e-06, |
| "loss": 0.2563, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.40628336129327436, |
| "grad_norm": 0.23494770231046405, |
| "learning_rate": 9.68186845104299e-06, |
| "loss": 0.2622, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.4068933963702913, |
| "grad_norm": 0.2561543984617132, |
| "learning_rate": 9.679998059860347e-06, |
| "loss": 0.2498, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.4075034314473082, |
| "grad_norm": 0.23142595032383642, |
| "learning_rate": 9.678122368228571e-06, |
| "loss": 0.2503, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.4081134665243252, |
| "grad_norm": 0.23388256298097315, |
| "learning_rate": 9.676241378272022e-06, |
| "loss": 0.2611, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.4087235016013421, |
| "grad_norm": 0.2171665951648235, |
| "learning_rate": 9.674355092121064e-06, |
| "loss": 0.2339, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.409333536678359, |
| "grad_norm": 0.23523716756609286, |
| "learning_rate": 9.672463511912056e-06, |
| "loss": 0.2349, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.40994357175537594, |
| "grad_norm": 0.26050667381938597, |
| "learning_rate": 9.670566639787355e-06, |
| "loss": 0.2588, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.41055360683239284, |
| "grad_norm": 0.2568032147090647, |
| "learning_rate": 9.66866447789531e-06, |
| "loss": 0.2666, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.4111636419094098, |
| "grad_norm": 0.23258591576948226, |
| "learning_rate": 9.666757028390267e-06, |
| "loss": 0.2453, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.4117736769864267, |
| "grad_norm": 0.2532173686218808, |
| "learning_rate": 9.66484429343255e-06, |
| "loss": 0.2507, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.41238371206344365, |
| "grad_norm": 0.24742853540044907, |
| "learning_rate": 9.662926275188478e-06, |
| "loss": 0.2469, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.41299374714046055, |
| "grad_norm": 0.26148385144664343, |
| "learning_rate": 9.66100297583035e-06, |
| "loss": 0.2606, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.4136037822174775, |
| "grad_norm": 0.242123360508457, |
| "learning_rate": 9.659074397536446e-06, |
| "loss": 0.2413, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.4142138172944944, |
| "grad_norm": 0.25053386707994624, |
| "learning_rate": 9.657140542491025e-06, |
| "loss": 0.2598, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.41482385237151137, |
| "grad_norm": 0.2266347754111311, |
| "learning_rate": 9.655201412884328e-06, |
| "loss": 0.2333, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.41543388744852827, |
| "grad_norm": 0.29818873435724125, |
| "learning_rate": 9.65325701091256e-06, |
| "loss": 0.2582, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.41604392252554523, |
| "grad_norm": 0.2658125962674758, |
| "learning_rate": 9.651307338777903e-06, |
| "loss": 0.2581, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.41665395760256213, |
| "grad_norm": 0.2554674490027522, |
| "learning_rate": 9.649352398688506e-06, |
| "loss": 0.2738, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.4172639926795791, |
| "grad_norm": 0.25453506037265533, |
| "learning_rate": 9.647392192858489e-06, |
| "loss": 0.234, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.417874027756596, |
| "grad_norm": 0.25461734067333075, |
| "learning_rate": 9.645426723507929e-06, |
| "loss": 0.2424, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.41848406283361295, |
| "grad_norm": 0.23888484597715387, |
| "learning_rate": 9.64345599286287e-06, |
| "loss": 0.2428, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.41909409791062985, |
| "grad_norm": 0.2489790124226144, |
| "learning_rate": 9.64148000315531e-06, |
| "loss": 0.2418, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.4197041329876468, |
| "grad_norm": 0.2678885780331205, |
| "learning_rate": 9.63949875662321e-06, |
| "loss": 0.2616, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.4203141680646637, |
| "grad_norm": 0.2538936493974687, |
| "learning_rate": 9.637512255510475e-06, |
| "loss": 0.2534, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.42092420314168066, |
| "grad_norm": 0.23909031877638567, |
| "learning_rate": 9.635520502066968e-06, |
| "loss": 0.2389, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.42153423821869757, |
| "grad_norm": 0.25332515366600283, |
| "learning_rate": 9.633523498548502e-06, |
| "loss": 0.2502, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.4221442732957145, |
| "grad_norm": 0.2391451036740086, |
| "learning_rate": 9.63152124721683e-06, |
| "loss": 0.2659, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.4227543083727314, |
| "grad_norm": 0.2576670329623059, |
| "learning_rate": 9.629513750339656e-06, |
| "loss": 0.2621, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.4233643434497484, |
| "grad_norm": 0.25480315338019477, |
| "learning_rate": 9.627501010190616e-06, |
| "loss": 0.2515, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.4239743785267653, |
| "grad_norm": 0.23458259674144666, |
| "learning_rate": 9.625483029049295e-06, |
| "loss": 0.2643, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.42458441360378224, |
| "grad_norm": 0.24782100843445132, |
| "learning_rate": 9.623459809201201e-06, |
| "loss": 0.2412, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.42519444868079914, |
| "grad_norm": 0.2629037328093925, |
| "learning_rate": 9.62143135293779e-06, |
| "loss": 0.2435, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.4258044837578161, |
| "grad_norm": 0.23441780135241397, |
| "learning_rate": 9.619397662556434e-06, |
| "loss": 0.2275, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.426414518834833, |
| "grad_norm": 0.25570881804535683, |
| "learning_rate": 9.617358740360446e-06, |
| "loss": 0.2376, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.42702455391184996, |
| "grad_norm": 0.2575592928975001, |
| "learning_rate": 9.615314588659054e-06, |
| "loss": 0.2375, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.42763458898886686, |
| "grad_norm": 0.26485082967353496, |
| "learning_rate": 9.613265209767417e-06, |
| "loss": 0.218, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.42824462406588376, |
| "grad_norm": 0.26790743064582434, |
| "learning_rate": 9.611210606006606e-06, |
| "loss": 0.2398, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.4288546591429007, |
| "grad_norm": 0.2327699012795595, |
| "learning_rate": 9.60915077970362e-06, |
| "loss": 0.2458, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.4294646942199176, |
| "grad_norm": 0.25209047510843435, |
| "learning_rate": 9.607085733191362e-06, |
| "loss": 0.2484, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.4300747292969346, |
| "grad_norm": 0.24532855214020663, |
| "learning_rate": 9.605015468808651e-06, |
| "loss": 0.2241, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.4306847643739515, |
| "grad_norm": 0.23707628308383044, |
| "learning_rate": 9.602939988900223e-06, |
| "loss": 0.2118, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.43129479945096844, |
| "grad_norm": 0.2717265067792356, |
| "learning_rate": 9.600859295816708e-06, |
| "loss": 0.2365, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.43190483452798534, |
| "grad_norm": 0.24187592973175528, |
| "learning_rate": 9.598773391914655e-06, |
| "loss": 0.2528, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.4325148696050023, |
| "grad_norm": 0.24095158717754256, |
| "learning_rate": 9.596682279556499e-06, |
| "loss": 0.2316, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.4331249046820192, |
| "grad_norm": 0.2384804389224989, |
| "learning_rate": 9.594585961110586e-06, |
| "loss": 0.2195, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.43373493975903615, |
| "grad_norm": 0.22980754276143053, |
| "learning_rate": 9.59248443895115e-06, |
| "loss": 0.2197, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.43434497483605305, |
| "grad_norm": 0.23000195088140646, |
| "learning_rate": 9.590377715458328e-06, |
| "loss": 0.2132, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.43495500991307, |
| "grad_norm": 0.24348636008374336, |
| "learning_rate": 9.588265793018141e-06, |
| "loss": 0.2598, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.4355650449900869, |
| "grad_norm": 0.24462607007817821, |
| "learning_rate": 9.586148674022498e-06, |
| "loss": 0.2716, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.43617508006710387, |
| "grad_norm": 0.24199303136022748, |
| "learning_rate": 9.584026360869195e-06, |
| "loss": 0.2577, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.4367851151441208, |
| "grad_norm": 0.2444394229369308, |
| "learning_rate": 9.581898855961911e-06, |
| "loss": 0.2409, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.43739515022113773, |
| "grad_norm": 0.22421862207213614, |
| "learning_rate": 9.579766161710209e-06, |
| "loss": 0.2193, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.43800518529815463, |
| "grad_norm": 0.25355224565574364, |
| "learning_rate": 9.577628280529519e-06, |
| "loss": 0.2433, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.4386152203751716, |
| "grad_norm": 0.24034777143737782, |
| "learning_rate": 9.575485214841158e-06, |
| "loss": 0.2358, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.4392252554521885, |
| "grad_norm": 0.23465493475078136, |
| "learning_rate": 9.573336967072304e-06, |
| "loss": 0.2584, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.43983529052920545, |
| "grad_norm": 0.22722796480040341, |
| "learning_rate": 9.571183539656011e-06, |
| "loss": 0.2401, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.44044532560622235, |
| "grad_norm": 0.2627819359298546, |
| "learning_rate": 9.569024935031198e-06, |
| "loss": 0.2439, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.4410553606832393, |
| "grad_norm": 0.24326571420142928, |
| "learning_rate": 9.566861155642646e-06, |
| "loss": 0.2381, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.4416653957602562, |
| "grad_norm": 0.23545903061020426, |
| "learning_rate": 9.564692203940997e-06, |
| "loss": 0.2525, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.44227543083727316, |
| "grad_norm": 0.24436023364810228, |
| "learning_rate": 9.562518082382751e-06, |
| "loss": 0.2426, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.44288546591429007, |
| "grad_norm": 0.24538160588010707, |
| "learning_rate": 9.560338793430266e-06, |
| "loss": 0.2364, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.443495500991307, |
| "grad_norm": 0.2275693745015377, |
| "learning_rate": 9.558154339551748e-06, |
| "loss": 0.2425, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.4441055360683239, |
| "grad_norm": 0.256020016141183, |
| "learning_rate": 9.555964723221258e-06, |
| "loss": 0.27, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.4447155711453409, |
| "grad_norm": 0.27300315086725535, |
| "learning_rate": 9.553769946918698e-06, |
| "loss": 0.2575, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.4453256062223578, |
| "grad_norm": 0.23385109804562323, |
| "learning_rate": 9.551570013129819e-06, |
| "loss": 0.2208, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.44593564129937474, |
| "grad_norm": 0.23386938978555916, |
| "learning_rate": 9.54936492434621e-06, |
| "loss": 0.2359, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.44654567637639164, |
| "grad_norm": 0.2404558436293708, |
| "learning_rate": 9.547154683065298e-06, |
| "loss": 0.2435, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.44715571145340854, |
| "grad_norm": 0.24500388350627236, |
| "learning_rate": 9.544939291790352e-06, |
| "loss": 0.2415, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.4477657465304255, |
| "grad_norm": 0.23691618297887454, |
| "learning_rate": 9.542718753030463e-06, |
| "loss": 0.2236, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.4483757816074424, |
| "grad_norm": 0.24857806187672893, |
| "learning_rate": 9.540493069300563e-06, |
| "loss": 0.2485, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.44898581668445936, |
| "grad_norm": 0.2382926637523894, |
| "learning_rate": 9.538262243121403e-06, |
| "loss": 0.2386, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.44959585176147626, |
| "grad_norm": 0.24030488684741594, |
| "learning_rate": 9.536026277019562e-06, |
| "loss": 0.2353, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.4502058868384932, |
| "grad_norm": 0.23339236344005018, |
| "learning_rate": 9.533785173527438e-06, |
| "loss": 0.2347, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.4508159219155101, |
| "grad_norm": 0.2299604201218471, |
| "learning_rate": 9.531538935183252e-06, |
| "loss": 0.2274, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.4514259569925271, |
| "grad_norm": 0.2301101735723042, |
| "learning_rate": 9.529287564531034e-06, |
| "loss": 0.2379, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.452035992069544, |
| "grad_norm": 0.24777348892591716, |
| "learning_rate": 9.527031064120632e-06, |
| "loss": 0.2158, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.45264602714656094, |
| "grad_norm": 0.2382250577730699, |
| "learning_rate": 9.524769436507703e-06, |
| "loss": 0.257, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.45325606222357784, |
| "grad_norm": 0.23491855965377412, |
| "learning_rate": 9.522502684253709e-06, |
| "loss": 0.2374, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.4538660973005948, |
| "grad_norm": 0.2381867196722309, |
| "learning_rate": 9.520230809925917e-06, |
| "loss": 0.2415, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.4544761323776117, |
| "grad_norm": 0.23464500865994656, |
| "learning_rate": 9.517953816097396e-06, |
| "loss": 0.252, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.45508616745462865, |
| "grad_norm": 0.23333387158301092, |
| "learning_rate": 9.515671705347012e-06, |
| "loss": 0.2315, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.45569620253164556, |
| "grad_norm": 0.2511717945420412, |
| "learning_rate": 9.513384480259427e-06, |
| "loss": 0.2442, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.4563062376086625, |
| "grad_norm": 0.2476626774783739, |
| "learning_rate": 9.511092143425093e-06, |
| "loss": 0.2407, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.4569162726856794, |
| "grad_norm": 0.25129087071494044, |
| "learning_rate": 9.508794697440257e-06, |
| "loss": 0.2677, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.45752630776269637, |
| "grad_norm": 0.25628919436974645, |
| "learning_rate": 9.506492144906949e-06, |
| "loss": 0.2609, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.4581363428397133, |
| "grad_norm": 0.24629875943849752, |
| "learning_rate": 9.50418448843298e-06, |
| "loss": 0.2318, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.45874637791673023, |
| "grad_norm": 0.2291062788929306, |
| "learning_rate": 9.501871730631944e-06, |
| "loss": 0.2361, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.45935641299374713, |
| "grad_norm": 0.22973777787593416, |
| "learning_rate": 9.499553874123213e-06, |
| "loss": 0.2352, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.4599664480707641, |
| "grad_norm": 0.23346050639344187, |
| "learning_rate": 9.497230921531938e-06, |
| "loss": 0.246, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.460576483147781, |
| "grad_norm": 0.23581281213008645, |
| "learning_rate": 9.494902875489031e-06, |
| "loss": 0.2576, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.46118651822479795, |
| "grad_norm": 0.2496519595616449, |
| "learning_rate": 9.492569738631182e-06, |
| "loss": 0.2328, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.46179655330181485, |
| "grad_norm": 0.24711405854515872, |
| "learning_rate": 9.490231513600842e-06, |
| "loss": 0.2448, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.4624065883788318, |
| "grad_norm": 0.2629979912664831, |
| "learning_rate": 9.487888203046232e-06, |
| "loss": 0.2282, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.4630166234558487, |
| "grad_norm": 0.23229398082709177, |
| "learning_rate": 9.485539809621319e-06, |
| "loss": 0.2529, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.46362665853286567, |
| "grad_norm": 0.2351650163696824, |
| "learning_rate": 9.48318633598584e-06, |
| "loss": 0.2423, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.46423669360988257, |
| "grad_norm": 0.22334068920688743, |
| "learning_rate": 9.480827784805278e-06, |
| "loss": 0.2503, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.4648467286868995, |
| "grad_norm": 0.23324497333424316, |
| "learning_rate": 9.478464158750873e-06, |
| "loss": 0.2598, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.4654567637639164, |
| "grad_norm": 0.2376429511505979, |
| "learning_rate": 9.476095460499604e-06, |
| "loss": 0.2426, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.4660667988409333, |
| "grad_norm": 0.23127830303431715, |
| "learning_rate": 9.473721692734203e-06, |
| "loss": 0.2293, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.4666768339179503, |
| "grad_norm": 0.2279735382560775, |
| "learning_rate": 9.47134285814314e-06, |
| "loss": 0.232, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.4672868689949672, |
| "grad_norm": 0.24060612490706318, |
| "learning_rate": 9.468958959420622e-06, |
| "loss": 0.2395, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.46789690407198414, |
| "grad_norm": 0.24073558221650648, |
| "learning_rate": 9.466569999266595e-06, |
| "loss": 0.2362, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.46850693914900104, |
| "grad_norm": 0.25234148310686183, |
| "learning_rate": 9.464175980386735e-06, |
| "loss": 0.2364, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.469116974226018, |
| "grad_norm": 0.24301381464309002, |
| "learning_rate": 9.461776905492446e-06, |
| "loss": 0.2541, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.4697270093030349, |
| "grad_norm": 0.2181646957236186, |
| "learning_rate": 9.459372777300863e-06, |
| "loss": 0.2315, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.47033704438005186, |
| "grad_norm": 0.24408126431197386, |
| "learning_rate": 9.456963598534843e-06, |
| "loss": 0.2635, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.47094707945706876, |
| "grad_norm": 0.2536793988988464, |
| "learning_rate": 9.454549371922958e-06, |
| "loss": 0.245, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.4715571145340857, |
| "grad_norm": 0.23467096465196274, |
| "learning_rate": 9.452130100199504e-06, |
| "loss": 0.2282, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.4721671496111026, |
| "grad_norm": 0.23975568079817508, |
| "learning_rate": 9.449705786104486e-06, |
| "loss": 0.2628, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.4727771846881196, |
| "grad_norm": 0.2531867022216451, |
| "learning_rate": 9.447276432383622e-06, |
| "loss": 0.2539, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.4733872197651365, |
| "grad_norm": 0.23729965993164157, |
| "learning_rate": 9.44484204178834e-06, |
| "loss": 0.2431, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.47399725484215344, |
| "grad_norm": 0.23266240192528784, |
| "learning_rate": 9.442402617075765e-06, |
| "loss": 0.2485, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.47460728991917034, |
| "grad_norm": 0.4593727203802515, |
| "learning_rate": 9.439958161008733e-06, |
| "loss": 0.2467, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.4752173249961873, |
| "grad_norm": 0.2423521825841295, |
| "learning_rate": 9.437508676355774e-06, |
| "loss": 0.2515, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.4758273600732042, |
| "grad_norm": 0.23786940493723296, |
| "learning_rate": 9.43505416589111e-06, |
| "loss": 0.2582, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.47643739515022115, |
| "grad_norm": 0.24386754510833997, |
| "learning_rate": 9.43259463239466e-06, |
| "loss": 0.2364, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.47704743022723806, |
| "grad_norm": 0.24902097785836147, |
| "learning_rate": 9.43013007865203e-06, |
| "loss": 0.214, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.477657465304255, |
| "grad_norm": 0.23911865820557984, |
| "learning_rate": 9.427660507454515e-06, |
| "loss": 0.2443, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.4782675003812719, |
| "grad_norm": 0.2557095629339037, |
| "learning_rate": 9.425185921599085e-06, |
| "loss": 0.2402, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.47887753545828887, |
| "grad_norm": 0.2521433109274837, |
| "learning_rate": 9.422706323888398e-06, |
| "loss": 0.2578, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.4794875705353058, |
| "grad_norm": 0.2365601659243881, |
| "learning_rate": 9.420221717130783e-06, |
| "loss": 0.2491, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.48009760561232273, |
| "grad_norm": 0.24468779134408927, |
| "learning_rate": 9.417732104140243e-06, |
| "loss": 0.2361, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.48070764068933963, |
| "grad_norm": 0.25848472600184347, |
| "learning_rate": 9.415237487736452e-06, |
| "loss": 0.2455, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.4813176757663566, |
| "grad_norm": 0.24659632129132983, |
| "learning_rate": 9.412737870744752e-06, |
| "loss": 0.259, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.4819277108433735, |
| "grad_norm": 0.2376635659932075, |
| "learning_rate": 9.410233255996146e-06, |
| "loss": 0.2486, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.48253774592039045, |
| "grad_norm": 0.26330890674932006, |
| "learning_rate": 9.407723646327299e-06, |
| "loss": 0.2724, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.48314778099740735, |
| "grad_norm": 0.2965239250982198, |
| "learning_rate": 9.40520904458053e-06, |
| "loss": 0.2246, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.48375781607442425, |
| "grad_norm": 0.2462468999949858, |
| "learning_rate": 9.402689453603815e-06, |
| "loss": 0.2877, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.4843678511514412, |
| "grad_norm": 0.390541205009224, |
| "learning_rate": 9.400164876250781e-06, |
| "loss": 0.2237, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.4849778862284581, |
| "grad_norm": 0.2487914992956595, |
| "learning_rate": 9.3976353153807e-06, |
| "loss": 0.2456, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.48558792130547507, |
| "grad_norm": 0.22828415306478353, |
| "learning_rate": 9.395100773858492e-06, |
| "loss": 0.24, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.48619795638249197, |
| "grad_norm": 0.2651952079542631, |
| "learning_rate": 9.392561254554712e-06, |
| "loss": 0.2479, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.4868079914595089, |
| "grad_norm": 0.2186420215795485, |
| "learning_rate": 9.39001676034556e-06, |
| "loss": 0.2062, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.48741802653652583, |
| "grad_norm": 0.23488406613309, |
| "learning_rate": 9.387467294112864e-06, |
| "loss": 0.2374, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.4880280616135428, |
| "grad_norm": 0.2335215343559726, |
| "learning_rate": 9.384912858744088e-06, |
| "loss": 0.252, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.4886380966905597, |
| "grad_norm": 0.24953746869431825, |
| "learning_rate": 9.382353457132318e-06, |
| "loss": 0.2427, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.48924813176757664, |
| "grad_norm": 0.2328812255084704, |
| "learning_rate": 9.379789092176275e-06, |
| "loss": 0.229, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.48985816684459355, |
| "grad_norm": 0.22737943989850504, |
| "learning_rate": 9.377219766780288e-06, |
| "loss": 0.2239, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.4904682019216105, |
| "grad_norm": 0.23159191713799232, |
| "learning_rate": 9.374645483854315e-06, |
| "loss": 0.2438, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.4910782369986274, |
| "grad_norm": 0.22823543143777628, |
| "learning_rate": 9.372066246313922e-06, |
| "loss": 0.2145, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.49168827207564436, |
| "grad_norm": 0.24969394403631143, |
| "learning_rate": 9.369482057080293e-06, |
| "loss": 0.2089, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.49229830715266126, |
| "grad_norm": 0.23891653993332385, |
| "learning_rate": 9.366892919080213e-06, |
| "loss": 0.2488, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.4929083422296782, |
| "grad_norm": 0.24089267103927142, |
| "learning_rate": 9.364298835246074e-06, |
| "loss": 0.2459, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.4935183773066951, |
| "grad_norm": 0.2489514639584053, |
| "learning_rate": 9.361699808515877e-06, |
| "loss": 0.2434, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.4941284123837121, |
| "grad_norm": 0.2184131072697668, |
| "learning_rate": 9.359095841833206e-06, |
| "loss": 0.2084, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.494738447460729, |
| "grad_norm": 0.23816642896153215, |
| "learning_rate": 9.356486938147256e-06, |
| "loss": 0.2271, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.49534848253774594, |
| "grad_norm": 0.23819781990228103, |
| "learning_rate": 9.353873100412805e-06, |
| "loss": 0.2371, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.49595851761476284, |
| "grad_norm": 0.23618169911622136, |
| "learning_rate": 9.351254331590216e-06, |
| "loss": 0.2276, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.4965685526917798, |
| "grad_norm": 0.26104490763327737, |
| "learning_rate": 9.348630634645446e-06, |
| "loss": 0.2497, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.4971785877687967, |
| "grad_norm": 0.2271620479721002, |
| "learning_rate": 9.346002012550027e-06, |
| "loss": 0.2464, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.49778862284581366, |
| "grad_norm": 0.2323705973707938, |
| "learning_rate": 9.34336846828107e-06, |
| "loss": 0.2491, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.49839865792283056, |
| "grad_norm": 0.21723754684916077, |
| "learning_rate": 9.340730004821266e-06, |
| "loss": 0.224, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.4990086929998475, |
| "grad_norm": 0.23734482056770834, |
| "learning_rate": 9.338086625158867e-06, |
| "loss": 0.2391, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.4996187280768644, |
| "grad_norm": 0.22493990824961632, |
| "learning_rate": 9.3354383322877e-06, |
| "loss": 0.2414, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.5002287631538813, |
| "grad_norm": 0.23763953888935077, |
| "learning_rate": 9.33278512920716e-06, |
| "loss": 0.257, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.5008387982308983, |
| "grad_norm": 0.23206692778749136, |
| "learning_rate": 9.330127018922195e-06, |
| "loss": 0.2389, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.5014488333079152, |
| "grad_norm": 0.22541539391517743, |
| "learning_rate": 9.327464004443315e-06, |
| "loss": 0.2336, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.5020588683849322, |
| "grad_norm": 0.24256078896560065, |
| "learning_rate": 9.324796088786581e-06, |
| "loss": 0.2429, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.502668903461949, |
| "grad_norm": 0.2468862949593829, |
| "learning_rate": 9.322123274973613e-06, |
| "loss": 0.2355, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.503278938538966, |
| "grad_norm": 0.22384411091337553, |
| "learning_rate": 9.31944556603157e-06, |
| "loss": 0.2213, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.503888973615983, |
| "grad_norm": 0.2514036453108826, |
| "learning_rate": 9.31676296499316e-06, |
| "loss": 0.2463, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.5044990086929998, |
| "grad_norm": 0.24925114054301487, |
| "learning_rate": 9.314075474896631e-06, |
| "loss": 0.2551, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.5051090437700168, |
| "grad_norm": 0.23276254658106843, |
| "learning_rate": 9.311383098785765e-06, |
| "loss": 0.2332, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.5057190788470337, |
| "grad_norm": 0.2370474417278097, |
| "learning_rate": 9.308685839709878e-06, |
| "loss": 0.2613, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.5063291139240507, |
| "grad_norm": 0.22872370038160483, |
| "learning_rate": 9.305983700723824e-06, |
| "loss": 0.2372, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.5069391490010675, |
| "grad_norm": 0.24716515266866768, |
| "learning_rate": 9.303276684887973e-06, |
| "loss": 0.2552, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.5075491840780845, |
| "grad_norm": 0.2265318455600586, |
| "learning_rate": 9.30056479526823e-06, |
| "loss": 0.2368, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.5081592191551014, |
| "grad_norm": 0.23946821858214107, |
| "learning_rate": 9.297848034936007e-06, |
| "loss": 0.2269, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.5087692542321184, |
| "grad_norm": 0.26397442672414717, |
| "learning_rate": 9.295126406968241e-06, |
| "loss": 0.2524, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.5093792893091352, |
| "grad_norm": 0.21612815204717434, |
| "learning_rate": 9.292399914447381e-06, |
| "loss": 0.2177, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.5099893243861522, |
| "grad_norm": 0.23050449682089552, |
| "learning_rate": 9.289668560461385e-06, |
| "loss": 0.2382, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.5105993594631691, |
| "grad_norm": 0.25879227486567125, |
| "learning_rate": 9.286932348103716e-06, |
| "loss": 0.2786, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.5112093945401861, |
| "grad_norm": 0.23581274681256434, |
| "learning_rate": 9.284191280473338e-06, |
| "loss": 0.2475, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.511819429617203, |
| "grad_norm": 0.23997499104246758, |
| "learning_rate": 9.281445360674717e-06, |
| "loss": 0.2421, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.5124294646942199, |
| "grad_norm": 0.23196299343759688, |
| "learning_rate": 9.278694591817814e-06, |
| "loss": 0.2537, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.5130394997712369, |
| "grad_norm": 0.23865479598143985, |
| "learning_rate": 9.275938977018082e-06, |
| "loss": 0.2429, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.5136495348482538, |
| "grad_norm": 0.22539671688187207, |
| "learning_rate": 9.273178519396459e-06, |
| "loss": 0.235, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.5142595699252707, |
| "grad_norm": 0.2529599698610769, |
| "learning_rate": 9.270413222079373e-06, |
| "loss": 0.2516, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.5148696050022876, |
| "grad_norm": 0.257432749177655, |
| "learning_rate": 9.26764308819873e-06, |
| "loss": 0.251, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.5154796400793046, |
| "grad_norm": 0.23477208951640044, |
| "learning_rate": 9.264868120891913e-06, |
| "loss": 0.2448, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.5160896751563215, |
| "grad_norm": 0.2599692203117331, |
| "learning_rate": 9.262088323301782e-06, |
| "loss": 0.2395, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.5166997102333384, |
| "grad_norm": 0.2391225469089055, |
| "learning_rate": 9.259303698576669e-06, |
| "loss": 0.2352, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.5173097453103553, |
| "grad_norm": 0.2610324632293431, |
| "learning_rate": 9.256514249870366e-06, |
| "loss": 0.242, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.5179197803873723, |
| "grad_norm": 0.22258363931378589, |
| "learning_rate": 9.253719980342134e-06, |
| "loss": 0.2384, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.5185298154643893, |
| "grad_norm": 0.32551326135551306, |
| "learning_rate": 9.250920893156696e-06, |
| "loss": 0.2443, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.5191398505414061, |
| "grad_norm": 0.23679924925359405, |
| "learning_rate": 9.24811699148423e-06, |
| "loss": 0.23, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.5197498856184231, |
| "grad_norm": 0.22408751438114305, |
| "learning_rate": 9.245308278500355e-06, |
| "loss": 0.2163, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.52035992069544, |
| "grad_norm": 0.21958835705262147, |
| "learning_rate": 9.24249475738616e-06, |
| "loss": 0.2357, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.5209699557724569, |
| "grad_norm": 0.23911714756619634, |
| "learning_rate": 9.239676431328164e-06, |
| "loss": 0.2427, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.5215799908494738, |
| "grad_norm": 0.22342842695193937, |
| "learning_rate": 9.236853303518333e-06, |
| "loss": 0.2375, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.5221900259264908, |
| "grad_norm": 0.27681328656512555, |
| "learning_rate": 9.234025377154073e-06, |
| "loss": 0.2572, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.5228000610035077, |
| "grad_norm": 0.25175601110086143, |
| "learning_rate": 9.231192655438222e-06, |
| "loss": 0.2491, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.5234100960805246, |
| "grad_norm": 0.2637545918682464, |
| "learning_rate": 9.228355141579048e-06, |
| "loss": 0.2643, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.5240201311575415, |
| "grad_norm": 0.24678037303000705, |
| "learning_rate": 9.225512838790252e-06, |
| "loss": 0.2295, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.5246301662345585, |
| "grad_norm": 0.25271929900504264, |
| "learning_rate": 9.222665750290953e-06, |
| "loss": 0.2332, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.5252402013115754, |
| "grad_norm": 0.2346919500276136, |
| "learning_rate": 9.219813879305692e-06, |
| "loss": 0.2612, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.5258502363885923, |
| "grad_norm": 0.25570687887348353, |
| "learning_rate": 9.21695722906443e-06, |
| "loss": 0.2395, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.5264602714656093, |
| "grad_norm": 0.27891063472335015, |
| "learning_rate": 9.214095802802533e-06, |
| "loss": 0.25, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.5270703065426262, |
| "grad_norm": 0.23748539072986685, |
| "learning_rate": 9.211229603760787e-06, |
| "loss": 0.2344, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.5276803416196432, |
| "grad_norm": 0.22278988238334246, |
| "learning_rate": 9.208358635185372e-06, |
| "loss": 0.2276, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.52829037669666, |
| "grad_norm": 0.2661064237307424, |
| "learning_rate": 9.20548290032788e-06, |
| "loss": 0.2404, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.528900411773677, |
| "grad_norm": 0.2482715675689319, |
| "learning_rate": 9.202602402445294e-06, |
| "loss": 0.231, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.5295104468506939, |
| "grad_norm": 0.23627964937286555, |
| "learning_rate": 9.199717144799994e-06, |
| "loss": 0.2182, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.5301204819277109, |
| "grad_norm": 0.23312268728117894, |
| "learning_rate": 9.196827130659752e-06, |
| "loss": 0.231, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.5307305170047277, |
| "grad_norm": 0.2521885753183132, |
| "learning_rate": 9.193932363297724e-06, |
| "loss": 0.2152, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.5313405520817447, |
| "grad_norm": 0.2572040177255539, |
| "learning_rate": 9.191032845992453e-06, |
| "loss": 0.231, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.5319505871587616, |
| "grad_norm": 0.24710288355302004, |
| "learning_rate": 9.18812858202786e-06, |
| "loss": 0.2555, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.5325606222357786, |
| "grad_norm": 0.2486931954461719, |
| "learning_rate": 9.185219574693242e-06, |
| "loss": 0.2306, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.5331706573127954, |
| "grad_norm": 0.2542190718905183, |
| "learning_rate": 9.18230582728327e-06, |
| "loss": 0.271, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.5337806923898124, |
| "grad_norm": 0.24410915290177013, |
| "learning_rate": 9.179387343097978e-06, |
| "loss": 0.2576, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.5343907274668294, |
| "grad_norm": 0.22953871125837036, |
| "learning_rate": 9.17646412544277e-06, |
| "loss": 0.2488, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.5350007625438463, |
| "grad_norm": 0.25812105965097, |
| "learning_rate": 9.17353617762841e-06, |
| "loss": 0.2596, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.5356107976208632, |
| "grad_norm": 0.26246862819412475, |
| "learning_rate": 9.170603502971017e-06, |
| "loss": 0.2496, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.5362208326978801, |
| "grad_norm": 0.22330376240227373, |
| "learning_rate": 9.167666104792067e-06, |
| "loss": 0.2296, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.5368308677748971, |
| "grad_norm": 0.23681969799356747, |
| "learning_rate": 9.164723986418385e-06, |
| "loss": 0.2319, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.537440902851914, |
| "grad_norm": 0.2445777883564648, |
| "learning_rate": 9.161777151182137e-06, |
| "loss": 0.2382, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.5380509379289309, |
| "grad_norm": 0.240934911695689, |
| "learning_rate": 9.158825602420838e-06, |
| "loss": 0.2514, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.5386609730059478, |
| "grad_norm": 0.23990828086840085, |
| "learning_rate": 9.155869343477339e-06, |
| "loss": 0.2412, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.5392710080829648, |
| "grad_norm": 0.22730222183887502, |
| "learning_rate": 9.152908377699823e-06, |
| "loss": 0.2334, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.5398810431599816, |
| "grad_norm": 0.22530806978627918, |
| "learning_rate": 9.149942708441808e-06, |
| "loss": 0.2341, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.5404910782369986, |
| "grad_norm": 0.24120868240608, |
| "learning_rate": 9.146972339062136e-06, |
| "loss": 0.2541, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.5411011133140156, |
| "grad_norm": 0.308352315358742, |
| "learning_rate": 9.143997272924974e-06, |
| "loss": 0.2423, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.5417111483910325, |
| "grad_norm": 0.25467840429327515, |
| "learning_rate": 9.141017513399806e-06, |
| "loss": 0.2497, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.5423211834680494, |
| "grad_norm": 0.2422144432264907, |
| "learning_rate": 9.138033063861436e-06, |
| "loss": 0.2685, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.5429312185450663, |
| "grad_norm": 0.25084723479258747, |
| "learning_rate": 9.135043927689975e-06, |
| "loss": 0.2681, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.5435412536220833, |
| "grad_norm": 0.2496126574784138, |
| "learning_rate": 9.132050108270845e-06, |
| "loss": 0.2412, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.5441512886991002, |
| "grad_norm": 0.24937237576609259, |
| "learning_rate": 9.129051608994773e-06, |
| "loss": 0.2455, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.5447613237761171, |
| "grad_norm": 0.24410548774952506, |
| "learning_rate": 9.12604843325778e-06, |
| "loss": 0.2531, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.545371358853134, |
| "grad_norm": 0.25039944675489567, |
| "learning_rate": 9.123040584461192e-06, |
| "loss": 0.2581, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.545981393930151, |
| "grad_norm": 0.24217578431818285, |
| "learning_rate": 9.120028066011621e-06, |
| "loss": 0.2642, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.546591429007168, |
| "grad_norm": 0.23799501463139952, |
| "learning_rate": 9.117010881320973e-06, |
| "loss": 0.2445, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.5472014640841848, |
| "grad_norm": 0.2328066882782319, |
| "learning_rate": 9.113989033806434e-06, |
| "loss": 0.2446, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.5478114991612018, |
| "grad_norm": 0.2256128476716643, |
| "learning_rate": 9.110962526890474e-06, |
| "loss": 0.2261, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.5484215342382187, |
| "grad_norm": 0.24277658311529213, |
| "learning_rate": 9.10793136400084e-06, |
| "loss": 0.2533, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.5490315693152357, |
| "grad_norm": 0.2244964492427359, |
| "learning_rate": 9.104895548570549e-06, |
| "loss": 0.2417, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.5496416043922525, |
| "grad_norm": 0.23817530380584634, |
| "learning_rate": 9.101855084037893e-06, |
| "loss": 0.2299, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.5502516394692695, |
| "grad_norm": 0.21462378141591873, |
| "learning_rate": 9.098809973846425e-06, |
| "loss": 0.2282, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.5508616745462864, |
| "grad_norm": 0.22887797228368942, |
| "learning_rate": 9.09576022144496e-06, |
| "loss": 0.2188, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.5514717096233034, |
| "grad_norm": 0.2317966772564381, |
| "learning_rate": 9.092705830287572e-06, |
| "loss": 0.2339, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.5520817447003202, |
| "grad_norm": 0.23477031220665048, |
| "learning_rate": 9.089646803833589e-06, |
| "loss": 0.2334, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.5526917797773372, |
| "grad_norm": 0.24063497257307356, |
| "learning_rate": 9.086583145547588e-06, |
| "loss": 0.238, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.5533018148543541, |
| "grad_norm": 0.24278984054747293, |
| "learning_rate": 9.083514858899391e-06, |
| "loss": 0.2422, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.5539118499313711, |
| "grad_norm": 0.24698518564289468, |
| "learning_rate": 9.080441947364065e-06, |
| "loss": 0.2354, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.554521885008388, |
| "grad_norm": 0.2573974858694405, |
| "learning_rate": 9.07736441442191e-06, |
| "loss": 0.2366, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.5551319200854049, |
| "grad_norm": 0.23686076398195488, |
| "learning_rate": 9.074282263558465e-06, |
| "loss": 0.2332, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.5557419551624219, |
| "grad_norm": 0.26716589782054984, |
| "learning_rate": 9.071195498264497e-06, |
| "loss": 0.23, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.5563519902394388, |
| "grad_norm": 0.23254352826520663, |
| "learning_rate": 9.068104122036e-06, |
| "loss": 0.2261, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.5569620253164557, |
| "grad_norm": 0.23774506033550974, |
| "learning_rate": 9.06500813837419e-06, |
| "loss": 0.2538, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.5575720603934726, |
| "grad_norm": 0.24578732684387927, |
| "learning_rate": 9.061907550785498e-06, |
| "loss": 0.2267, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.5581820954704896, |
| "grad_norm": 0.24273173792130215, |
| "learning_rate": 9.058802362781576e-06, |
| "loss": 0.2429, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.5587921305475064, |
| "grad_norm": 0.24871249511209675, |
| "learning_rate": 9.055692577879279e-06, |
| "loss": 0.2645, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.5594021656245234, |
| "grad_norm": 0.2489675357704421, |
| "learning_rate": 9.052578199600675e-06, |
| "loss": 0.2641, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.5600122007015403, |
| "grad_norm": 0.2348175485183629, |
| "learning_rate": 9.049459231473028e-06, |
| "loss": 0.2179, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.5606222357785573, |
| "grad_norm": 0.24516386553768676, |
| "learning_rate": 9.046335677028806e-06, |
| "loss": 0.2398, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.5612322708555741, |
| "grad_norm": 0.2539618785828311, |
| "learning_rate": 9.04320753980567e-06, |
| "loss": 0.2443, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.5618423059325911, |
| "grad_norm": 0.2375763291210803, |
| "learning_rate": 9.040074823346466e-06, |
| "loss": 0.2319, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.5624523410096081, |
| "grad_norm": 0.2295019349307068, |
| "learning_rate": 9.036937531199235e-06, |
| "loss": 0.2174, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.563062376086625, |
| "grad_norm": 0.23913641148482256, |
| "learning_rate": 9.033795666917191e-06, |
| "loss": 0.2336, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.5636724111636419, |
| "grad_norm": 0.2365015694994553, |
| "learning_rate": 9.030649234058738e-06, |
| "loss": 0.2288, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.5642824462406588, |
| "grad_norm": 0.2269035714506007, |
| "learning_rate": 9.02749823618744e-06, |
| "loss": 0.2359, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.5648924813176758, |
| "grad_norm": 0.23862880675583445, |
| "learning_rate": 9.024342676872044e-06, |
| "loss": 0.2321, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.5655025163946927, |
| "grad_norm": 0.24866509535913342, |
| "learning_rate": 9.021182559686454e-06, |
| "loss": 0.2517, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.5661125514717096, |
| "grad_norm": 0.22871794221013542, |
| "learning_rate": 9.018017888209744e-06, |
| "loss": 0.2467, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.5667225865487265, |
| "grad_norm": 0.25107482056399233, |
| "learning_rate": 9.014848666026138e-06, |
| "loss": 0.2661, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.5673326216257435, |
| "grad_norm": 0.23633368721157266, |
| "learning_rate": 9.01167489672502e-06, |
| "loss": 0.2463, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.5679426567027605, |
| "grad_norm": 0.2469957443857779, |
| "learning_rate": 9.008496583900925e-06, |
| "loss": 0.2485, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.5685526917797773, |
| "grad_norm": 0.23023949616042658, |
| "learning_rate": 9.005313731153525e-06, |
| "loss": 0.2401, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.5691627268567943, |
| "grad_norm": 0.2324877334752998, |
| "learning_rate": 9.002126342087643e-06, |
| "loss": 0.2297, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.5697727619338112, |
| "grad_norm": 0.234321661561038, |
| "learning_rate": 8.998934420313236e-06, |
| "loss": 0.2384, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.5703827970108282, |
| "grad_norm": 0.24123765281463405, |
| "learning_rate": 8.995737969445395e-06, |
| "loss": 0.2352, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.570992832087845, |
| "grad_norm": 0.23544270953963511, |
| "learning_rate": 8.992536993104339e-06, |
| "loss": 0.2304, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.571602867164862, |
| "grad_norm": 0.24273749530372107, |
| "learning_rate": 8.989331494915417e-06, |
| "loss": 0.2376, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.5722129022418789, |
| "grad_norm": 0.23238771153358787, |
| "learning_rate": 8.986121478509096e-06, |
| "loss": 0.2325, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.5728229373188959, |
| "grad_norm": 0.25536548436456125, |
| "learning_rate": 8.982906947520958e-06, |
| "loss": 0.2471, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.5734329723959127, |
| "grad_norm": 0.2277984497148008, |
| "learning_rate": 8.9796879055917e-06, |
| "loss": 0.2329, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.5740430074729297, |
| "grad_norm": 0.3157076986257636, |
| "learning_rate": 8.976464356367133e-06, |
| "loss": 0.2472, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.5746530425499466, |
| "grad_norm": 0.2431944807189335, |
| "learning_rate": 8.973236303498165e-06, |
| "loss": 0.2265, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.5752630776269636, |
| "grad_norm": 0.2415392811651247, |
| "learning_rate": 8.97000375064081e-06, |
| "loss": 0.2401, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.5758731127039804, |
| "grad_norm": 0.24697514473461818, |
| "learning_rate": 8.966766701456177e-06, |
| "loss": 0.2481, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.5764831477809974, |
| "grad_norm": 0.2310906025904227, |
| "learning_rate": 8.963525159610465e-06, |
| "loss": 0.2204, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.5770931828580144, |
| "grad_norm": 0.3167379712129428, |
| "learning_rate": 8.960279128774965e-06, |
| "loss": 0.2319, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.5777032179350312, |
| "grad_norm": 0.2532736663817971, |
| "learning_rate": 8.957028612626051e-06, |
| "loss": 0.2571, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.5783132530120482, |
| "grad_norm": 0.2709052682297909, |
| "learning_rate": 8.953773614845175e-06, |
| "loss": 0.2432, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.5789232880890651, |
| "grad_norm": 0.24875501207670353, |
| "learning_rate": 8.950514139118868e-06, |
| "loss": 0.2381, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.5795333231660821, |
| "grad_norm": 0.2386346874806699, |
| "learning_rate": 8.947250189138732e-06, |
| "loss": 0.2541, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.5801433582430989, |
| "grad_norm": 0.23503372535899722, |
| "learning_rate": 8.943981768601431e-06, |
| "loss": 0.2399, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.5807533933201159, |
| "grad_norm": 0.23484091677557625, |
| "learning_rate": 8.9407088812087e-06, |
| "loss": 0.2385, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.5813634283971328, |
| "grad_norm": 0.2786973828276919, |
| "learning_rate": 8.937431530667329e-06, |
| "loss": 0.2768, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.5819734634741498, |
| "grad_norm": 0.24501647742704377, |
| "learning_rate": 8.934149720689164e-06, |
| "loss": 0.2405, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.5825834985511666, |
| "grad_norm": 0.24403169887634135, |
| "learning_rate": 8.9308634549911e-06, |
| "loss": 0.2548, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.5831935336281836, |
| "grad_norm": 0.2272567853866675, |
| "learning_rate": 8.927572737295081e-06, |
| "loss": 0.2418, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.5838035687052006, |
| "grad_norm": 0.24563126522644446, |
| "learning_rate": 8.924277571328091e-06, |
| "loss": 0.2529, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.5844136037822175, |
| "grad_norm": 0.2364803637356654, |
| "learning_rate": 8.92097796082215e-06, |
| "loss": 0.2267, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.5850236388592344, |
| "grad_norm": 0.24044223758674313, |
| "learning_rate": 8.917673909514321e-06, |
| "loss": 0.2749, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.5856336739362513, |
| "grad_norm": 0.2235920684260497, |
| "learning_rate": 8.914365421146685e-06, |
| "loss": 0.2375, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.5862437090132683, |
| "grad_norm": 0.24233427434900542, |
| "learning_rate": 8.911052499466358e-06, |
| "loss": 0.2574, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.5868537440902852, |
| "grad_norm": 0.2199417231731091, |
| "learning_rate": 8.907735148225465e-06, |
| "loss": 0.2321, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.5874637791673021, |
| "grad_norm": 0.24156632473583037, |
| "learning_rate": 8.904413371181164e-06, |
| "loss": 0.2362, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.588073814244319, |
| "grad_norm": 0.24319106385211212, |
| "learning_rate": 8.90108717209561e-06, |
| "loss": 0.2524, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.588683849321336, |
| "grad_norm": 0.21780477785194519, |
| "learning_rate": 8.897756554735976e-06, |
| "loss": 0.2375, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.589293884398353, |
| "grad_norm": 0.2631983155577083, |
| "learning_rate": 8.894421522874438e-06, |
| "loss": 0.2531, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.5899039194753698, |
| "grad_norm": 0.24907326563896523, |
| "learning_rate": 8.891082080288167e-06, |
| "loss": 0.2501, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.5905139545523868, |
| "grad_norm": 0.2183747192980442, |
| "learning_rate": 8.887738230759334e-06, |
| "loss": 0.2476, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.5911239896294037, |
| "grad_norm": 0.2429805274522966, |
| "learning_rate": 8.884389978075098e-06, |
| "loss": 0.2394, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.5917340247064207, |
| "grad_norm": 0.26143482773747845, |
| "learning_rate": 8.88103732602761e-06, |
| "loss": 0.2501, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.5923440597834375, |
| "grad_norm": 0.24008352338552205, |
| "learning_rate": 8.877680278413995e-06, |
| "loss": 0.2301, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.5929540948604545, |
| "grad_norm": 0.2504323744037465, |
| "learning_rate": 8.874318839036367e-06, |
| "loss": 0.2632, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.5935641299374714, |
| "grad_norm": 0.22502293301141857, |
| "learning_rate": 8.870953011701804e-06, |
| "loss": 0.2138, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.5941741650144884, |
| "grad_norm": 0.25060434823535316, |
| "learning_rate": 8.867582800222358e-06, |
| "loss": 0.216, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.5947842000915052, |
| "grad_norm": 0.22294734491003537, |
| "learning_rate": 8.864208208415051e-06, |
| "loss": 0.2355, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.5953942351685222, |
| "grad_norm": 0.23476612585041143, |
| "learning_rate": 8.860829240101858e-06, |
| "loss": 0.2454, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.5960042702455391, |
| "grad_norm": 0.24478825993869993, |
| "learning_rate": 8.857445899109716e-06, |
| "loss": 0.2362, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.596614305322556, |
| "grad_norm": 0.23675812972527574, |
| "learning_rate": 8.85405818927051e-06, |
| "loss": 0.2433, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.597224340399573, |
| "grad_norm": 0.23289797925718586, |
| "learning_rate": 8.85066611442108e-06, |
| "loss": 0.2616, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.5978343754765899, |
| "grad_norm": 0.22975309494536794, |
| "learning_rate": 8.847269678403202e-06, |
| "loss": 0.2372, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.5984444105536069, |
| "grad_norm": 0.25014127869116004, |
| "learning_rate": 8.843868885063594e-06, |
| "loss": 0.2481, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.5990544456306237, |
| "grad_norm": 0.235261338075581, |
| "learning_rate": 8.840463738253913e-06, |
| "loss": 0.224, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.5996644807076407, |
| "grad_norm": 0.263864345048999, |
| "learning_rate": 8.83705424183074e-06, |
| "loss": 0.2315, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.6002745157846576, |
| "grad_norm": 0.2313037025839392, |
| "learning_rate": 8.83364039965559e-06, |
| "loss": 0.2343, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.6008845508616746, |
| "grad_norm": 0.2316483778451406, |
| "learning_rate": 8.83022221559489e-06, |
| "loss": 0.2446, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.6014945859386914, |
| "grad_norm": 0.2635286913095841, |
| "learning_rate": 8.826799693519996e-06, |
| "loss": 0.2477, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.6021046210157084, |
| "grad_norm": 0.2402482306391233, |
| "learning_rate": 8.823372837307167e-06, |
| "loss": 0.2513, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.6027146560927253, |
| "grad_norm": 0.2402515762798396, |
| "learning_rate": 8.819941650837576e-06, |
| "loss": 0.2607, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.6033246911697423, |
| "grad_norm": 0.24283381310704727, |
| "learning_rate": 8.8165061379973e-06, |
| "loss": 0.2366, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.6039347262467591, |
| "grad_norm": 0.25297933475508455, |
| "learning_rate": 8.813066302677314e-06, |
| "loss": 0.2363, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.6045447613237761, |
| "grad_norm": 0.2449648651812992, |
| "learning_rate": 8.809622148773492e-06, |
| "loss": 0.2499, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.6051547964007931, |
| "grad_norm": 0.26539602479579855, |
| "learning_rate": 8.806173680186594e-06, |
| "loss": 0.2663, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.60576483147781, |
| "grad_norm": 0.2514174464993971, |
| "learning_rate": 8.80272090082227e-06, |
| "loss": 0.2418, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.6063748665548269, |
| "grad_norm": 0.26721468418789796, |
| "learning_rate": 8.799263814591053e-06, |
| "loss": 0.2289, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.6069849016318438, |
| "grad_norm": 0.25553432819619254, |
| "learning_rate": 8.795802425408352e-06, |
| "loss": 0.2732, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.6075949367088608, |
| "grad_norm": 0.2376491834604695, |
| "learning_rate": 8.792336737194452e-06, |
| "loss": 0.2329, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.6082049717858777, |
| "grad_norm": 0.234012654353243, |
| "learning_rate": 8.788866753874504e-06, |
| "loss": 0.2178, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.6088150068628946, |
| "grad_norm": 0.23538003697799972, |
| "learning_rate": 8.785392479378522e-06, |
| "loss": 0.2324, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.6094250419399115, |
| "grad_norm": 0.24857362136607877, |
| "learning_rate": 8.781913917641385e-06, |
| "loss": 0.2392, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.6100350770169285, |
| "grad_norm": 0.2346747879400166, |
| "learning_rate": 8.778431072602825e-06, |
| "loss": 0.2271, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.6100350770169285, |
| "eval_loss": 0.23594006896018982, |
| "eval_runtime": 267.1885, |
| "eval_samples_per_second": 4.005, |
| "eval_steps_per_second": 0.127, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.6106451120939455, |
| "grad_norm": 0.21228502344726596, |
| "learning_rate": 8.774943948207427e-06, |
| "loss": 0.2289, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.6112551471709623, |
| "grad_norm": 0.2500646392480775, |
| "learning_rate": 8.771452548404618e-06, |
| "loss": 0.2441, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.6118651822479793, |
| "grad_norm": 0.23169540639899572, |
| "learning_rate": 8.767956877148671e-06, |
| "loss": 0.242, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.6124752173249962, |
| "grad_norm": 0.22760124382669475, |
| "learning_rate": 8.7644569383987e-06, |
| "loss": 0.2552, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.6130852524020132, |
| "grad_norm": 0.2484290973427134, |
| "learning_rate": 8.760952736118645e-06, |
| "loss": 0.2324, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.61369528747903, |
| "grad_norm": 0.2359583380201497, |
| "learning_rate": 8.757444274277277e-06, |
| "loss": 0.2175, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.614305322556047, |
| "grad_norm": 0.23220814369598353, |
| "learning_rate": 8.753931556848195e-06, |
| "loss": 0.2334, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.6149153576330639, |
| "grad_norm": 0.27498536282455094, |
| "learning_rate": 8.750414587809815e-06, |
| "loss": 0.2582, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.6155253927100808, |
| "grad_norm": 0.24078199124375543, |
| "learning_rate": 8.746893371145367e-06, |
| "loss": 0.2317, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.6161354277870977, |
| "grad_norm": 0.23280560001762854, |
| "learning_rate": 8.743367910842895e-06, |
| "loss": 0.2349, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.6167454628641147, |
| "grad_norm": 0.23708670544193472, |
| "learning_rate": 8.739838210895244e-06, |
| "loss": 0.2215, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.6173554979411316, |
| "grad_norm": 0.236517827135338, |
| "learning_rate": 8.736304275300069e-06, |
| "loss": 0.2372, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.6179655330181485, |
| "grad_norm": 0.22912668594274044, |
| "learning_rate": 8.732766108059814e-06, |
| "loss": 0.2253, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.6185755680951655, |
| "grad_norm": 0.23350025433250618, |
| "learning_rate": 8.72922371318172e-06, |
| "loss": 0.2274, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.6191856031721824, |
| "grad_norm": 0.23095428571608784, |
| "learning_rate": 8.725677094677816e-06, |
| "loss": 0.2349, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.6197956382491994, |
| "grad_norm": 0.2458511385558197, |
| "learning_rate": 8.722126256564911e-06, |
| "loss": 0.2405, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.6204056733262162, |
| "grad_norm": 0.26209582180802815, |
| "learning_rate": 8.718571202864598e-06, |
| "loss": 0.2391, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.6210157084032332, |
| "grad_norm": 0.23368392839276564, |
| "learning_rate": 8.715011937603243e-06, |
| "loss": 0.2296, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.6216257434802501, |
| "grad_norm": 0.23329724132770577, |
| "learning_rate": 8.711448464811978e-06, |
| "loss": 0.2425, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.6222357785572671, |
| "grad_norm": 0.23902811017652673, |
| "learning_rate": 8.707880788526708e-06, |
| "loss": 0.2478, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.6228458136342839, |
| "grad_norm": 0.41056486667562464, |
| "learning_rate": 8.70430891278809e-06, |
| "loss": 0.2198, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.6234558487113009, |
| "grad_norm": 0.22537846767846592, |
| "learning_rate": 8.700732841641542e-06, |
| "loss": 0.2273, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.6240658837883178, |
| "grad_norm": 0.44369492215713807, |
| "learning_rate": 8.697152579137235e-06, |
| "loss": 0.235, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.6246759188653348, |
| "grad_norm": 0.23115109526114427, |
| "learning_rate": 8.693568129330083e-06, |
| "loss": 0.2273, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.6252859539423516, |
| "grad_norm": 0.23509942514935336, |
| "learning_rate": 8.689979496279747e-06, |
| "loss": 0.2222, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.6258959890193686, |
| "grad_norm": 0.2368473427816096, |
| "learning_rate": 8.68638668405062e-06, |
| "loss": 0.227, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.6265060240963856, |
| "grad_norm": 0.21340890219018352, |
| "learning_rate": 8.682789696711835e-06, |
| "loss": 0.222, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.6271160591734025, |
| "grad_norm": 0.26572592638064496, |
| "learning_rate": 8.679188538337248e-06, |
| "loss": 0.2567, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.6277260942504194, |
| "grad_norm": 0.22809021260408102, |
| "learning_rate": 8.675583213005443e-06, |
| "loss": 0.2279, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.6283361293274363, |
| "grad_norm": 0.23394941073505565, |
| "learning_rate": 8.671973724799719e-06, |
| "loss": 0.2365, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.6289461644044533, |
| "grad_norm": 0.23887151195100773, |
| "learning_rate": 8.668360077808093e-06, |
| "loss": 0.2472, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.6295561994814702, |
| "grad_norm": 0.22443946646077903, |
| "learning_rate": 8.664742276123293e-06, |
| "loss": 0.2356, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.6301662345584871, |
| "grad_norm": 0.24621520519113224, |
| "learning_rate": 8.661120323842751e-06, |
| "loss": 0.2312, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.630776269635504, |
| "grad_norm": 0.2292352894257401, |
| "learning_rate": 8.657494225068599e-06, |
| "loss": 0.2375, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.631386304712521, |
| "grad_norm": 0.23841878871186253, |
| "learning_rate": 8.653863983907665e-06, |
| "loss": 0.2423, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.631996339789538, |
| "grad_norm": 0.23381301415513847, |
| "learning_rate": 8.650229604471471e-06, |
| "loss": 0.217, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.6326063748665548, |
| "grad_norm": 0.2482976093582336, |
| "learning_rate": 8.646591090876225e-06, |
| "loss": 0.2387, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.6332164099435718, |
| "grad_norm": 0.2403988608376904, |
| "learning_rate": 8.642948447242816e-06, |
| "loss": 0.2294, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.6338264450205887, |
| "grad_norm": 0.25498061501682, |
| "learning_rate": 8.639301677696812e-06, |
| "loss": 0.254, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.6344364800976056, |
| "grad_norm": 0.2589882916862488, |
| "learning_rate": 8.635650786368452e-06, |
| "loss": 0.2442, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.6350465151746225, |
| "grad_norm": 0.23064501898207432, |
| "learning_rate": 8.631995777392645e-06, |
| "loss": 0.2291, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.6356565502516395, |
| "grad_norm": 0.2275940945269994, |
| "learning_rate": 8.628336654908965e-06, |
| "loss": 0.2256, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.6362665853286564, |
| "grad_norm": 0.2329033335658531, |
| "learning_rate": 8.62467342306164e-06, |
| "loss": 0.2364, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.6368766204056733, |
| "grad_norm": 0.22329035062909994, |
| "learning_rate": 8.621006085999557e-06, |
| "loss": 0.2302, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.6374866554826902, |
| "grad_norm": 0.24244570767153245, |
| "learning_rate": 8.61733464787625e-06, |
| "loss": 0.2568, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.6380966905597072, |
| "grad_norm": 0.23192088392396648, |
| "learning_rate": 8.613659112849898e-06, |
| "loss": 0.2563, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.6387067256367241, |
| "grad_norm": 0.24548506765765826, |
| "learning_rate": 8.609979485083319e-06, |
| "loss": 0.2491, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.639316760713741, |
| "grad_norm": 0.22389446741910482, |
| "learning_rate": 8.606295768743971e-06, |
| "loss": 0.2235, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.639926795790758, |
| "grad_norm": 0.22417184612529703, |
| "learning_rate": 8.602607968003935e-06, |
| "loss": 0.2241, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.6405368308677749, |
| "grad_norm": 0.25746416694776264, |
| "learning_rate": 8.598916087039929e-06, |
| "loss": 0.2283, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.6411468659447919, |
| "grad_norm": 0.24729586756768165, |
| "learning_rate": 8.595220130033278e-06, |
| "loss": 0.225, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.6417569010218087, |
| "grad_norm": 0.22642669069332533, |
| "learning_rate": 8.591520101169932e-06, |
| "loss": 0.2142, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.6423669360988257, |
| "grad_norm": 0.23342351734889488, |
| "learning_rate": 8.587816004640456e-06, |
| "loss": 0.2225, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.6429769711758426, |
| "grad_norm": 0.23649016461111497, |
| "learning_rate": 8.584107844640015e-06, |
| "loss": 0.2223, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.6435870062528596, |
| "grad_norm": 0.23848683592171122, |
| "learning_rate": 8.580395625368377e-06, |
| "loss": 0.2374, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.6441970413298764, |
| "grad_norm": 0.2284818810785837, |
| "learning_rate": 8.57667935102991e-06, |
| "loss": 0.2325, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.6448070764068934, |
| "grad_norm": 0.23563332797270775, |
| "learning_rate": 8.572959025833573e-06, |
| "loss": 0.2266, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.6454171114839103, |
| "grad_norm": 0.2464668648207247, |
| "learning_rate": 8.569234653992916e-06, |
| "loss": 0.2311, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.6460271465609273, |
| "grad_norm": 0.23023004719076975, |
| "learning_rate": 8.565506239726068e-06, |
| "loss": 0.2202, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.6466371816379441, |
| "grad_norm": 0.2640229604622966, |
| "learning_rate": 8.561773787255738e-06, |
| "loss": 0.2353, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.6472472167149611, |
| "grad_norm": 0.23223099173660483, |
| "learning_rate": 8.558037300809209e-06, |
| "loss": 0.2291, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.6478572517919781, |
| "grad_norm": 0.22720435240012227, |
| "learning_rate": 8.554296784618331e-06, |
| "loss": 0.2359, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.648467286868995, |
| "grad_norm": 0.22326014158801946, |
| "learning_rate": 8.550552242919521e-06, |
| "loss": 0.2503, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.6490773219460119, |
| "grad_norm": 0.21990941872169437, |
| "learning_rate": 8.546803679953754e-06, |
| "loss": 0.2176, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.6496873570230288, |
| "grad_norm": 0.21398433020072413, |
| "learning_rate": 8.543051099966558e-06, |
| "loss": 0.209, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.6502973921000458, |
| "grad_norm": 0.262452859799478, |
| "learning_rate": 8.539294507208011e-06, |
| "loss": 0.2169, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.6509074271770627, |
| "grad_norm": 0.23857047674039852, |
| "learning_rate": 8.535533905932739e-06, |
| "loss": 0.2535, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.6515174622540796, |
| "grad_norm": 0.2290193027949121, |
| "learning_rate": 8.531769300399901e-06, |
| "loss": 0.2282, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.6521274973310965, |
| "grad_norm": 0.22083932716278717, |
| "learning_rate": 8.5280006948732e-06, |
| "loss": 0.2336, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.6527375324081135, |
| "grad_norm": 0.22246158483514522, |
| "learning_rate": 8.524228093620858e-06, |
| "loss": 0.2416, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.6533475674851303, |
| "grad_norm": 0.21561212780574032, |
| "learning_rate": 8.520451500915636e-06, |
| "loss": 0.2179, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.6539576025621473, |
| "grad_norm": 0.23427975137844478, |
| "learning_rate": 8.516670921034808e-06, |
| "loss": 0.2238, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.6545676376391643, |
| "grad_norm": 0.2424857066387141, |
| "learning_rate": 8.512886358260162e-06, |
| "loss": 0.2243, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.6551776727161812, |
| "grad_norm": 0.22388291176428463, |
| "learning_rate": 8.509097816877998e-06, |
| "loss": 0.2331, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.6557877077931981, |
| "grad_norm": 0.24291536165799196, |
| "learning_rate": 8.505305301179126e-06, |
| "loss": 0.2399, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.656397742870215, |
| "grad_norm": 0.21783349465923516, |
| "learning_rate": 8.501508815458856e-06, |
| "loss": 0.2105, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.657007777947232, |
| "grad_norm": 0.24460448320216843, |
| "learning_rate": 8.49770836401699e-06, |
| "loss": 0.2381, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.6576178130242489, |
| "grad_norm": 0.23595918167659072, |
| "learning_rate": 8.493903951157827e-06, |
| "loss": 0.2404, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.6582278481012658, |
| "grad_norm": 0.2463785035159111, |
| "learning_rate": 8.490095581190149e-06, |
| "loss": 0.2536, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.6588378831782827, |
| "grad_norm": 0.24510766049649824, |
| "learning_rate": 8.486283258427222e-06, |
| "loss": 0.2363, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.6594479182552997, |
| "grad_norm": 0.2217249773828561, |
| "learning_rate": 8.482466987186785e-06, |
| "loss": 0.2324, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.6600579533323166, |
| "grad_norm": 0.2765387808119517, |
| "learning_rate": 8.478646771791054e-06, |
| "loss": 0.2508, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.6606679884093335, |
| "grad_norm": 0.22300248422446325, |
| "learning_rate": 8.474822616566711e-06, |
| "loss": 0.2109, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.6612780234863505, |
| "grad_norm": 0.2173767023833337, |
| "learning_rate": 8.470994525844895e-06, |
| "loss": 0.2235, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.6618880585633674, |
| "grad_norm": 0.2352796493955204, |
| "learning_rate": 8.467162503961209e-06, |
| "loss": 0.2236, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.6624980936403844, |
| "grad_norm": 0.23816022190314465, |
| "learning_rate": 8.463326555255705e-06, |
| "loss": 0.2456, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.6631081287174012, |
| "grad_norm": 0.24662909644422232, |
| "learning_rate": 8.459486684072883e-06, |
| "loss": 0.2315, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.6637181637944182, |
| "grad_norm": 0.22918344342880975, |
| "learning_rate": 8.455642894761684e-06, |
| "loss": 0.2243, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.6643281988714351, |
| "grad_norm": 0.24701880968682127, |
| "learning_rate": 8.451795191675488e-06, |
| "loss": 0.2282, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.6649382339484521, |
| "grad_norm": 0.252271888950006, |
| "learning_rate": 8.44794357917211e-06, |
| "loss": 0.2249, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.6655482690254689, |
| "grad_norm": 0.25713164139831324, |
| "learning_rate": 8.444088061613788e-06, |
| "loss": 0.2483, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.6661583041024859, |
| "grad_norm": 0.23416139444019676, |
| "learning_rate": 8.440228643367188e-06, |
| "loss": 0.2362, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.6667683391795028, |
| "grad_norm": 0.22046541488555407, |
| "learning_rate": 8.436365328803386e-06, |
| "loss": 0.24, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.6673783742565198, |
| "grad_norm": 0.24786462384821747, |
| "learning_rate": 8.432498122297879e-06, |
| "loss": 0.2548, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.6679884093335366, |
| "grad_norm": 0.24671418121331395, |
| "learning_rate": 8.42862702823057e-06, |
| "loss": 0.2313, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.6685984444105536, |
| "grad_norm": 0.2386497978703496, |
| "learning_rate": 8.424752050985759e-06, |
| "loss": 0.2235, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.6692084794875706, |
| "grad_norm": 0.22968504637849413, |
| "learning_rate": 8.420873194952153e-06, |
| "loss": 0.2318, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.6698185145645874, |
| "grad_norm": 0.22870461975088988, |
| "learning_rate": 8.416990464522847e-06, |
| "loss": 0.213, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.6704285496416044, |
| "grad_norm": 0.24043487052170476, |
| "learning_rate": 8.413103864095322e-06, |
| "loss": 0.228, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.6710385847186213, |
| "grad_norm": 0.27758706857221244, |
| "learning_rate": 8.409213398071448e-06, |
| "loss": 0.229, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.6716486197956383, |
| "grad_norm": 0.2278187512949372, |
| "learning_rate": 8.405319070857466e-06, |
| "loss": 0.2099, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.6722586548726551, |
| "grad_norm": 0.21328727861837193, |
| "learning_rate": 8.401420886863998e-06, |
| "loss": 0.2176, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.6728686899496721, |
| "grad_norm": 0.2575211195112028, |
| "learning_rate": 8.39751885050603e-06, |
| "loss": 0.2354, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.673478725026689, |
| "grad_norm": 0.24555722594295123, |
| "learning_rate": 8.393612966202907e-06, |
| "loss": 0.2367, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.674088760103706, |
| "grad_norm": 0.22888826965165, |
| "learning_rate": 8.38970323837834e-06, |
| "loss": 0.2321, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.6746987951807228, |
| "grad_norm": 0.23363250380097145, |
| "learning_rate": 8.385789671460387e-06, |
| "loss": 0.2238, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.6753088302577398, |
| "grad_norm": 0.2274158118030155, |
| "learning_rate": 8.381872269881457e-06, |
| "loss": 0.2212, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.6759188653347568, |
| "grad_norm": 0.265507657795025, |
| "learning_rate": 8.377951038078303e-06, |
| "loss": 0.2848, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.6765289004117737, |
| "grad_norm": 0.23260057000097717, |
| "learning_rate": 8.37402598049201e-06, |
| "loss": 0.2232, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.6771389354887906, |
| "grad_norm": 0.240932782955572, |
| "learning_rate": 8.370097101568007e-06, |
| "loss": 0.2259, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.6777489705658075, |
| "grad_norm": 0.25307725757101474, |
| "learning_rate": 8.366164405756039e-06, |
| "loss": 0.249, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.6783590056428245, |
| "grad_norm": 0.2363549436507277, |
| "learning_rate": 8.36222789751018e-06, |
| "loss": 0.2317, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.6789690407198414, |
| "grad_norm": 0.283464866982075, |
| "learning_rate": 8.358287581288824e-06, |
| "loss": 0.2374, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.6795790757968583, |
| "grad_norm": 0.24552247588641626, |
| "learning_rate": 8.35434346155467e-06, |
| "loss": 0.2321, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.6801891108738752, |
| "grad_norm": 0.24535783592570692, |
| "learning_rate": 8.350395542774737e-06, |
| "loss": 0.2426, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.6807991459508922, |
| "grad_norm": 0.26345289189142446, |
| "learning_rate": 8.34644382942033e-06, |
| "loss": 0.2379, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.6814091810279091, |
| "grad_norm": 0.24433115654789947, |
| "learning_rate": 8.342488325967068e-06, |
| "loss": 0.2436, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.682019216104926, |
| "grad_norm": 0.24813412245663188, |
| "learning_rate": 8.338529036894855e-06, |
| "loss": 0.2427, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.682629251181943, |
| "grad_norm": 0.2246533356599739, |
| "learning_rate": 8.334565966687882e-06, |
| "loss": 0.2309, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.6832392862589599, |
| "grad_norm": 0.24678856538769056, |
| "learning_rate": 8.330599119834622e-06, |
| "loss": 0.2429, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.6838493213359769, |
| "grad_norm": 0.2750478481998693, |
| "learning_rate": 8.326628500827826e-06, |
| "loss": 0.2408, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.6844593564129937, |
| "grad_norm": 0.21598660094808886, |
| "learning_rate": 8.322654114164524e-06, |
| "loss": 0.2102, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.6850693914900107, |
| "grad_norm": 0.2343051371469007, |
| "learning_rate": 8.318675964346001e-06, |
| "loss": 0.2483, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.6856794265670276, |
| "grad_norm": 0.25297310363283115, |
| "learning_rate": 8.314694055877814e-06, |
| "loss": 0.2521, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.6862894616440446, |
| "grad_norm": 0.2337948691832747, |
| "learning_rate": 8.310708393269773e-06, |
| "loss": 0.2205, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.6868994967210614, |
| "grad_norm": 0.23465407792916493, |
| "learning_rate": 8.306718981035937e-06, |
| "loss": 0.2289, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.6875095317980784, |
| "grad_norm": 0.24891299806667935, |
| "learning_rate": 8.302725823694619e-06, |
| "loss": 0.2286, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.6881195668750953, |
| "grad_norm": 0.24328476121321269, |
| "learning_rate": 8.298728925768367e-06, |
| "loss": 0.224, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.6887296019521122, |
| "grad_norm": 0.2382382369544805, |
| "learning_rate": 8.294728291783967e-06, |
| "loss": 0.2386, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.6893396370291291, |
| "grad_norm": 0.2385597546167655, |
| "learning_rate": 8.290723926272439e-06, |
| "loss": 0.212, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.6899496721061461, |
| "grad_norm": 0.25577463335566336, |
| "learning_rate": 8.286715833769027e-06, |
| "loss": 0.2453, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.6905597071831631, |
| "grad_norm": 0.2560431692968465, |
| "learning_rate": 8.282704018813199e-06, |
| "loss": 0.2554, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.6911697422601799, |
| "grad_norm": 0.24340131037369614, |
| "learning_rate": 8.278688485948634e-06, |
| "loss": 0.2067, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.6917797773371969, |
| "grad_norm": 0.23092696341317517, |
| "learning_rate": 8.274669239723223e-06, |
| "loss": 0.2216, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.6923898124142138, |
| "grad_norm": 0.2554840520735112, |
| "learning_rate": 8.270646284689067e-06, |
| "loss": 0.2304, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.6929998474912308, |
| "grad_norm": 0.24958593690963987, |
| "learning_rate": 8.266619625402465e-06, |
| "loss": 0.231, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.6936098825682476, |
| "grad_norm": 0.2536907103950832, |
| "learning_rate": 8.262589266423908e-06, |
| "loss": 0.2377, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.6942199176452646, |
| "grad_norm": 0.2742601681630492, |
| "learning_rate": 8.258555212318083e-06, |
| "loss": 0.2636, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.6948299527222815, |
| "grad_norm": 0.24462935260812824, |
| "learning_rate": 8.254517467653858e-06, |
| "loss": 0.2355, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.6954399877992985, |
| "grad_norm": 0.23730909678619028, |
| "learning_rate": 8.25047603700428e-06, |
| "loss": 0.2246, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.6960500228763153, |
| "grad_norm": 0.2441627512964431, |
| "learning_rate": 8.246430924946575e-06, |
| "loss": 0.2242, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.6966600579533323, |
| "grad_norm": 0.23069368020362158, |
| "learning_rate": 8.242382136062135e-06, |
| "loss": 0.2344, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.6972700930303493, |
| "grad_norm": 0.26857725161103285, |
| "learning_rate": 8.238329674936518e-06, |
| "loss": 0.2517, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.6978801281073662, |
| "grad_norm": 0.2558117137259953, |
| "learning_rate": 8.234273546159441e-06, |
| "loss": 0.2293, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.6984901631843831, |
| "grad_norm": 0.2269265773408567, |
| "learning_rate": 8.230213754324773e-06, |
| "loss": 0.2158, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.6991001982614, |
| "grad_norm": 0.25171573657859114, |
| "learning_rate": 8.226150304030534e-06, |
| "loss": 0.2317, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.699710233338417, |
| "grad_norm": 0.23133479000353166, |
| "learning_rate": 8.222083199878885e-06, |
| "loss": 0.2099, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.7003202684154339, |
| "grad_norm": 0.25686510449037725, |
| "learning_rate": 8.218012446476128e-06, |
| "loss": 0.223, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.7009303034924508, |
| "grad_norm": 0.2456359179322101, |
| "learning_rate": 8.213938048432697e-06, |
| "loss": 0.2481, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.7015403385694677, |
| "grad_norm": 0.251265568965118, |
| "learning_rate": 8.209860010363153e-06, |
| "loss": 0.2336, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.7021503736464847, |
| "grad_norm": 0.25798870050684164, |
| "learning_rate": 8.205778336886182e-06, |
| "loss": 0.2155, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.7027604087235017, |
| "grad_norm": 0.2567842887016378, |
| "learning_rate": 8.201693032624584e-06, |
| "loss": 0.2299, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.7033704438005185, |
| "grad_norm": 0.2318668305771687, |
| "learning_rate": 8.19760410220527e-06, |
| "loss": 0.2303, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.7039804788775355, |
| "grad_norm": 0.22835210783508866, |
| "learning_rate": 8.193511550259268e-06, |
| "loss": 0.2392, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.7045905139545524, |
| "grad_norm": 0.2609582153500793, |
| "learning_rate": 8.189415381421693e-06, |
| "loss": 0.2466, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.7052005490315694, |
| "grad_norm": 0.22982680193286878, |
| "learning_rate": 8.185315600331768e-06, |
| "loss": 0.2321, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.7058105841085862, |
| "grad_norm": 0.2319979015513836, |
| "learning_rate": 8.1812122116328e-06, |
| "loss": 0.2271, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.7064206191856032, |
| "grad_norm": 0.25474865680783587, |
| "learning_rate": 8.177105219972187e-06, |
| "loss": 0.2326, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.7070306542626201, |
| "grad_norm": 0.22168370615095642, |
| "learning_rate": 8.172994630001405e-06, |
| "loss": 0.2037, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.707640689339637, |
| "grad_norm": 0.22219009238421022, |
| "learning_rate": 8.168880446376003e-06, |
| "loss": 0.2162, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.7082507244166539, |
| "grad_norm": 0.24093738372339843, |
| "learning_rate": 8.16476267375561e-06, |
| "loss": 0.2563, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.7088607594936709, |
| "grad_norm": 0.23841753177830316, |
| "learning_rate": 8.160641316803904e-06, |
| "loss": 0.2312, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.7094707945706878, |
| "grad_norm": 0.22861548309953486, |
| "learning_rate": 8.156516380188635e-06, |
| "loss": 0.2336, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.7100808296477047, |
| "grad_norm": 0.2360284376142477, |
| "learning_rate": 8.152387868581606e-06, |
| "loss": 0.2287, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.7106908647247216, |
| "grad_norm": 0.2520783115103026, |
| "learning_rate": 8.148255786658661e-06, |
| "loss": 0.2432, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.7113008998017386, |
| "grad_norm": 0.22384974462504423, |
| "learning_rate": 8.144120139099697e-06, |
| "loss": 0.2309, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.7119109348787556, |
| "grad_norm": 0.23670142303489072, |
| "learning_rate": 8.139980930588643e-06, |
| "loss": 0.2373, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.7125209699557724, |
| "grad_norm": 0.23092054782886554, |
| "learning_rate": 8.135838165813465e-06, |
| "loss": 0.2276, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.7131310050327894, |
| "grad_norm": 0.2271715206436096, |
| "learning_rate": 8.131691849466154e-06, |
| "loss": 0.2255, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.7137410401098063, |
| "grad_norm": 0.2229315252564618, |
| "learning_rate": 8.127541986242726e-06, |
| "loss": 0.2117, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.7143510751868233, |
| "grad_norm": 0.24446595930379747, |
| "learning_rate": 8.123388580843209e-06, |
| "loss": 0.2647, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.7149611102638401, |
| "grad_norm": 0.24913399767279057, |
| "learning_rate": 8.119231637971651e-06, |
| "loss": 0.2427, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.7155711453408571, |
| "grad_norm": 0.2981638083013536, |
| "learning_rate": 8.115071162336099e-06, |
| "loss": 0.2353, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.716181180417874, |
| "grad_norm": 0.24437466812048375, |
| "learning_rate": 8.110907158648605e-06, |
| "loss": 0.247, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.716791215494891, |
| "grad_norm": 0.2449625500398084, |
| "learning_rate": 8.106739631625216e-06, |
| "loss": 0.2364, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.7174012505719078, |
| "grad_norm": 0.2224944056103836, |
| "learning_rate": 8.102568585985968e-06, |
| "loss": 0.2266, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.7180112856489248, |
| "grad_norm": 0.24738870833311483, |
| "learning_rate": 8.098394026454886e-06, |
| "loss": 0.2283, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.7186213207259418, |
| "grad_norm": 0.24328672716739527, |
| "learning_rate": 8.09421595775997e-06, |
| "loss": 0.2401, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.7192313558029587, |
| "grad_norm": 0.22719417926256943, |
| "learning_rate": 8.090034384633194e-06, |
| "loss": 0.232, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.7198413908799756, |
| "grad_norm": 0.24324266904153669, |
| "learning_rate": 8.08584931181051e-06, |
| "loss": 0.2224, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.7204514259569925, |
| "grad_norm": 0.2199116279585472, |
| "learning_rate": 8.081660744031818e-06, |
| "loss": 0.222, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.7210614610340095, |
| "grad_norm": 0.23189331283667097, |
| "learning_rate": 8.077468686040994e-06, |
| "loss": 0.2347, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.7216714961110264, |
| "grad_norm": 0.23879528408430015, |
| "learning_rate": 8.073273142585856e-06, |
| "loss": 0.2254, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.7222815311880433, |
| "grad_norm": 0.22352638483489393, |
| "learning_rate": 8.06907411841817e-06, |
| "loss": 0.2106, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.7228915662650602, |
| "grad_norm": 0.23030909325093743, |
| "learning_rate": 8.064871618293647e-06, |
| "loss": 0.2455, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.7235016013420772, |
| "grad_norm": 0.22349269159069882, |
| "learning_rate": 8.060665646971934e-06, |
| "loss": 0.2243, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.7241116364190942, |
| "grad_norm": 0.2344008817329467, |
| "learning_rate": 8.056456209216609e-06, |
| "loss": 0.2466, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.724721671496111, |
| "grad_norm": 0.21862505201609747, |
| "learning_rate": 8.052243309795175e-06, |
| "loss": 0.2281, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.725331706573128, |
| "grad_norm": 0.22352934742727967, |
| "learning_rate": 8.048026953479062e-06, |
| "loss": 0.2253, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.7259417416501449, |
| "grad_norm": 0.22584075570112383, |
| "learning_rate": 8.043807145043604e-06, |
| "loss": 0.2437, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.7265517767271618, |
| "grad_norm": 0.22596357819552168, |
| "learning_rate": 8.039583889268055e-06, |
| "loss": 0.2533, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.7271618118041787, |
| "grad_norm": 0.2241405931562285, |
| "learning_rate": 8.035357190935565e-06, |
| "loss": 0.219, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.7277718468811957, |
| "grad_norm": 0.2407653255257801, |
| "learning_rate": 8.031127054833192e-06, |
| "loss": 0.2347, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.7283818819582126, |
| "grad_norm": 0.21279327555477656, |
| "learning_rate": 8.026893485751877e-06, |
| "loss": 0.2188, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.7289919170352295, |
| "grad_norm": 0.2261188217955493, |
| "learning_rate": 8.02265648848646e-06, |
| "loss": 0.228, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.7296019521122464, |
| "grad_norm": 0.21871855242114913, |
| "learning_rate": 8.01841606783566e-06, |
| "loss": 0.2244, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.7302119871892634, |
| "grad_norm": 0.26675126905336366, |
| "learning_rate": 8.014172228602063e-06, |
| "loss": 0.2218, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.7308220222662803, |
| "grad_norm": 0.2253793993000552, |
| "learning_rate": 8.009924975592145e-06, |
| "loss": 0.2284, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.7314320573432972, |
| "grad_norm": 0.21110478260357746, |
| "learning_rate": 8.005674313616231e-06, |
| "loss": 0.2224, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.7320420924203141, |
| "grad_norm": 0.2207365556711429, |
| "learning_rate": 8.00142024748852e-06, |
| "loss": 0.2363, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.7326521274973311, |
| "grad_norm": 0.2348452507736591, |
| "learning_rate": 7.997162782027061e-06, |
| "loss": 0.2345, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.7332621625743481, |
| "grad_norm": 0.22623405697304158, |
| "learning_rate": 7.992901922053751e-06, |
| "loss": 0.2309, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.7338721976513649, |
| "grad_norm": 0.22676453319507958, |
| "learning_rate": 7.988637672394337e-06, |
| "loss": 0.2179, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.7344822327283819, |
| "grad_norm": 0.21444773415921856, |
| "learning_rate": 7.984370037878396e-06, |
| "loss": 0.2302, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.7350922678053988, |
| "grad_norm": 0.24197036456944898, |
| "learning_rate": 7.98009902333935e-06, |
| "loss": 0.2344, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.7357023028824158, |
| "grad_norm": 0.241279921065408, |
| "learning_rate": 7.975824633614443e-06, |
| "loss": 0.2179, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.7363123379594326, |
| "grad_norm": 0.21156520913188215, |
| "learning_rate": 7.971546873544737e-06, |
| "loss": 0.2086, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.7369223730364496, |
| "grad_norm": 0.21162231842894277, |
| "learning_rate": 7.967265747975124e-06, |
| "loss": 0.2241, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.7375324081134665, |
| "grad_norm": 0.23532186361425042, |
| "learning_rate": 7.962981261754295e-06, |
| "loss": 0.2316, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.7381424431904835, |
| "grad_norm": 0.2568086699360337, |
| "learning_rate": 7.958693419734752e-06, |
| "loss": 0.2404, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.7387524782675003, |
| "grad_norm": 0.22705182738619106, |
| "learning_rate": 7.954402226772804e-06, |
| "loss": 0.2381, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.7393625133445173, |
| "grad_norm": 0.2261242827810141, |
| "learning_rate": 7.950107687728543e-06, |
| "loss": 0.2314, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.7399725484215343, |
| "grad_norm": 0.22796628523765586, |
| "learning_rate": 7.945809807465857e-06, |
| "loss": 0.2346, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.7405825834985512, |
| "grad_norm": 0.23528591338995508, |
| "learning_rate": 7.941508590852422e-06, |
| "loss": 0.2377, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.7411926185755681, |
| "grad_norm": 0.25031030450654246, |
| "learning_rate": 7.937204042759685e-06, |
| "loss": 0.2375, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.741802653652585, |
| "grad_norm": 0.24714501535227756, |
| "learning_rate": 7.932896168062874e-06, |
| "loss": 0.2343, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.742412688729602, |
| "grad_norm": 0.22388225074316453, |
| "learning_rate": 7.928584971640974e-06, |
| "loss": 0.2272, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.7430227238066189, |
| "grad_norm": 0.2330751968816673, |
| "learning_rate": 7.924270458376746e-06, |
| "loss": 0.2314, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.7436327588836358, |
| "grad_norm": 0.24644856853549163, |
| "learning_rate": 7.919952633156695e-06, |
| "loss": 0.2593, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.7442427939606527, |
| "grad_norm": 0.22879737615778123, |
| "learning_rate": 7.915631500871084e-06, |
| "loss": 0.2643, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.7448528290376697, |
| "grad_norm": 0.23142298859084215, |
| "learning_rate": 7.91130706641392e-06, |
| "loss": 0.2379, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.7454628641146865, |
| "grad_norm": 0.2524317523072115, |
| "learning_rate": 7.906979334682948e-06, |
| "loss": 0.23, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.7460728991917035, |
| "grad_norm": 0.23189754430124038, |
| "learning_rate": 7.90264831057965e-06, |
| "loss": 0.2304, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.7466829342687205, |
| "grad_norm": 0.2533738094883779, |
| "learning_rate": 7.898313999009238e-06, |
| "loss": 0.2385, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.7472929693457374, |
| "grad_norm": 0.2553489046355843, |
| "learning_rate": 7.893976404880643e-06, |
| "loss": 0.2507, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.7479030044227543, |
| "grad_norm": 0.25852436326492156, |
| "learning_rate": 7.889635533106515e-06, |
| "loss": 0.2351, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.7485130394997712, |
| "grad_norm": 0.22967301302314835, |
| "learning_rate": 7.88529138860322e-06, |
| "loss": 0.2266, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.7491230745767882, |
| "grad_norm": 0.26120335319102406, |
| "learning_rate": 7.880943976290826e-06, |
| "loss": 0.2486, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.7497331096538051, |
| "grad_norm": 0.25231195952917956, |
| "learning_rate": 7.876593301093104e-06, |
| "loss": 0.2263, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.750343144730822, |
| "grad_norm": 0.22763902531310243, |
| "learning_rate": 7.87223936793752e-06, |
| "loss": 0.2245, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.7509531798078389, |
| "grad_norm": 0.22658329796075055, |
| "learning_rate": 7.86788218175523e-06, |
| "loss": 0.2008, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.7515632148848559, |
| "grad_norm": 0.23368671212970132, |
| "learning_rate": 7.863521747481078e-06, |
| "loss": 0.2558, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.7521732499618728, |
| "grad_norm": 0.228740592341535, |
| "learning_rate": 7.859158070053578e-06, |
| "loss": 0.2369, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.7527832850388897, |
| "grad_norm": 0.21748172031890037, |
| "learning_rate": 7.854791154414925e-06, |
| "loss": 0.2074, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.7533933201159067, |
| "grad_norm": 0.21994817465646302, |
| "learning_rate": 7.850421005510977e-06, |
| "loss": 0.2332, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.7540033551929236, |
| "grad_norm": 0.23920796683942835, |
| "learning_rate": 7.846047628291259e-06, |
| "loss": 0.2242, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.7546133902699406, |
| "grad_norm": 0.2225627258100121, |
| "learning_rate": 7.841671027708945e-06, |
| "loss": 0.2407, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.7552234253469574, |
| "grad_norm": 0.23186328985692906, |
| "learning_rate": 7.837291208720867e-06, |
| "loss": 0.2325, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.7558334604239744, |
| "grad_norm": 0.21580905533020986, |
| "learning_rate": 7.832908176287497e-06, |
| "loss": 0.2167, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.7564434955009913, |
| "grad_norm": 0.22400244139158307, |
| "learning_rate": 7.828521935372948e-06, |
| "loss": 0.2289, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.7570535305780083, |
| "grad_norm": 0.22654448268365424, |
| "learning_rate": 7.824132490944968e-06, |
| "loss": 0.2203, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.7576635656550251, |
| "grad_norm": 0.23447871941330142, |
| "learning_rate": 7.819739847974932e-06, |
| "loss": 0.2479, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.7582736007320421, |
| "grad_norm": 0.2298061806062539, |
| "learning_rate": 7.81534401143784e-06, |
| "loss": 0.2247, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.758883635809059, |
| "grad_norm": 0.20948289511876436, |
| "learning_rate": 7.810944986312303e-06, |
| "loss": 0.229, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.759493670886076, |
| "grad_norm": 0.21746148602267854, |
| "learning_rate": 7.80654277758055e-06, |
| "loss": 0.2249, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.7601037059630928, |
| "grad_norm": 0.24122673913757323, |
| "learning_rate": 7.802137390228413e-06, |
| "loss": 0.2371, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.7607137410401098, |
| "grad_norm": 0.24321359321004912, |
| "learning_rate": 7.797728829245321e-06, |
| "loss": 0.2248, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.7613237761171268, |
| "grad_norm": 0.25483262345781993, |
| "learning_rate": 7.793317099624304e-06, |
| "loss": 0.2659, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.7619338111941437, |
| "grad_norm": 0.22017019551240405, |
| "learning_rate": 7.788902206361974e-06, |
| "loss": 0.2327, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.7625438462711606, |
| "grad_norm": 0.23290295174722392, |
| "learning_rate": 7.784484154458529e-06, |
| "loss": 0.2337, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.7631538813481775, |
| "grad_norm": 0.24425639545299802, |
| "learning_rate": 7.780062948917748e-06, |
| "loss": 0.253, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.7637639164251945, |
| "grad_norm": 0.25547736930193293, |
| "learning_rate": 7.775638594746975e-06, |
| "loss": 0.2324, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.7643739515022113, |
| "grad_norm": 0.25797109907955645, |
| "learning_rate": 7.771211096957125e-06, |
| "loss": 0.2235, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.7649839865792283, |
| "grad_norm": 0.22509840516733273, |
| "learning_rate": 7.766780460562668e-06, |
| "loss": 0.2356, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.7655940216562452, |
| "grad_norm": 0.23188690975236592, |
| "learning_rate": 7.762346690581636e-06, |
| "loss": 0.2324, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.7662040567332622, |
| "grad_norm": 0.2649412152942703, |
| "learning_rate": 7.757909792035608e-06, |
| "loss": 0.2499, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.766814091810279, |
| "grad_norm": 0.22237757487770793, |
| "learning_rate": 7.753469769949701e-06, |
| "loss": 0.2205, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.767424126887296, |
| "grad_norm": 0.2307508237229708, |
| "learning_rate": 7.749026629352574e-06, |
| "loss": 0.2214, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.768034161964313, |
| "grad_norm": 0.24305561326911349, |
| "learning_rate": 7.744580375276416e-06, |
| "loss": 0.2255, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.7686441970413299, |
| "grad_norm": 0.23895046411395413, |
| "learning_rate": 7.740131012756949e-06, |
| "loss": 0.2494, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.7692542321183468, |
| "grad_norm": 0.21275999961893463, |
| "learning_rate": 7.735678546833403e-06, |
| "loss": 0.2257, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.7698642671953637, |
| "grad_norm": 0.2300948862077056, |
| "learning_rate": 7.731222982548534e-06, |
| "loss": 0.2123, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.7704743022723807, |
| "grad_norm": 0.22850187582777248, |
| "learning_rate": 7.726764324948603e-06, |
| "loss": 0.2463, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.7710843373493976, |
| "grad_norm": 0.2349740693352672, |
| "learning_rate": 7.722302579083372e-06, |
| "loss": 0.2151, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.7716943724264145, |
| "grad_norm": 0.2291372748905964, |
| "learning_rate": 7.717837750006106e-06, |
| "loss": 0.2314, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.7723044075034314, |
| "grad_norm": 0.23656682674024532, |
| "learning_rate": 7.713369842773559e-06, |
| "loss": 0.2319, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.7729144425804484, |
| "grad_norm": 0.23540280383471496, |
| "learning_rate": 7.708898862445968e-06, |
| "loss": 0.2279, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.7735244776574653, |
| "grad_norm": 0.22784114220338783, |
| "learning_rate": 7.704424814087056e-06, |
| "loss": 0.2436, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.7741345127344822, |
| "grad_norm": 0.2153530551088378, |
| "learning_rate": 7.699947702764021e-06, |
| "loss": 0.2231, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.7747445478114992, |
| "grad_norm": 0.22471820248006716, |
| "learning_rate": 7.695467533547526e-06, |
| "loss": 0.2285, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.7753545828885161, |
| "grad_norm": 0.29785558070116797, |
| "learning_rate": 7.690984311511695e-06, |
| "loss": 0.2428, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.7759646179655331, |
| "grad_norm": 0.2330391693953769, |
| "learning_rate": 7.686498041734121e-06, |
| "loss": 0.2484, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.7765746530425499, |
| "grad_norm": 0.232168542840295, |
| "learning_rate": 7.682008729295834e-06, |
| "loss": 0.2431, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.7771846881195669, |
| "grad_norm": 0.2343178698731269, |
| "learning_rate": 7.67751637928132e-06, |
| "loss": 0.2376, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.7777947231965838, |
| "grad_norm": 0.24092057852258092, |
| "learning_rate": 7.673020996778507e-06, |
| "loss": 0.2305, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.7784047582736008, |
| "grad_norm": 0.22934187162290043, |
| "learning_rate": 7.668522586878747e-06, |
| "loss": 0.2271, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.7790147933506176, |
| "grad_norm": 0.23320760506261856, |
| "learning_rate": 7.664021154676828e-06, |
| "loss": 0.2465, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.7796248284276346, |
| "grad_norm": 0.25335406369839425, |
| "learning_rate": 7.659516705270964e-06, |
| "loss": 0.2355, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.7802348635046515, |
| "grad_norm": 0.22804728767238044, |
| "learning_rate": 7.655009243762779e-06, |
| "loss": 0.2329, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.7808448985816685, |
| "grad_norm": 0.24357066588081885, |
| "learning_rate": 7.650498775257308e-06, |
| "loss": 0.2481, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.7814549336586853, |
| "grad_norm": 0.22792879051170845, |
| "learning_rate": 7.645985304863004e-06, |
| "loss": 0.2274, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.7820649687357023, |
| "grad_norm": 0.2427200250101618, |
| "learning_rate": 7.641468837691704e-06, |
| "loss": 0.249, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.7826750038127193, |
| "grad_norm": 0.24302283034392322, |
| "learning_rate": 7.636949378858647e-06, |
| "loss": 0.2484, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.7832850388897361, |
| "grad_norm": 0.24328162193435018, |
| "learning_rate": 7.632426933482463e-06, |
| "loss": 0.2345, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.7838950739667531, |
| "grad_norm": 0.23545752554754826, |
| "learning_rate": 7.627901506685157e-06, |
| "loss": 0.2267, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.78450510904377, |
| "grad_norm": 0.24166143220162728, |
| "learning_rate": 7.623373103592117e-06, |
| "loss": 0.2232, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.785115144120787, |
| "grad_norm": 0.22697451038495747, |
| "learning_rate": 7.618841729332096e-06, |
| "loss": 0.2197, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.7857251791978038, |
| "grad_norm": 0.24022269483018174, |
| "learning_rate": 7.614307389037221e-06, |
| "loss": 0.2229, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.7863352142748208, |
| "grad_norm": 0.2734279797575056, |
| "learning_rate": 7.609770087842969e-06, |
| "loss": 0.2358, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.7869452493518377, |
| "grad_norm": 0.22672940324544666, |
| "learning_rate": 7.605229830888175e-06, |
| "loss": 0.2501, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.7875552844288547, |
| "grad_norm": 0.2313163672526963, |
| "learning_rate": 7.6006866233150185e-06, |
| "loss": 0.2309, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.7881653195058715, |
| "grad_norm": 0.24972872083148773, |
| "learning_rate": 7.596140470269029e-06, |
| "loss": 0.2446, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.7887753545828885, |
| "grad_norm": 0.23952232980453123, |
| "learning_rate": 7.5915913768990615e-06, |
| "loss": 0.2335, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.7893853896599055, |
| "grad_norm": 0.22291765355920107, |
| "learning_rate": 7.587039348357306e-06, |
| "loss": 0.2221, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.7899954247369224, |
| "grad_norm": 0.22097300349547738, |
| "learning_rate": 7.582484389799279e-06, |
| "loss": 0.226, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.7906054598139393, |
| "grad_norm": 0.23934611021017924, |
| "learning_rate": 7.57792650638381e-06, |
| "loss": 0.2405, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.7912154948909562, |
| "grad_norm": 0.23655610661652388, |
| "learning_rate": 7.573365703273045e-06, |
| "loss": 0.2409, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.7918255299679732, |
| "grad_norm": 0.2322235300104474, |
| "learning_rate": 7.568801985632439e-06, |
| "loss": 0.2316, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.7924355650449901, |
| "grad_norm": 0.23263724623119364, |
| "learning_rate": 7.564235358630741e-06, |
| "loss": 0.2224, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.793045600122007, |
| "grad_norm": 0.24587977411446366, |
| "learning_rate": 7.559665827439999e-06, |
| "loss": 0.2279, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.7936556351990239, |
| "grad_norm": 0.23587703962080825, |
| "learning_rate": 7.555093397235553e-06, |
| "loss": 0.2108, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.7942656702760409, |
| "grad_norm": 0.2298321573596229, |
| "learning_rate": 7.55051807319602e-06, |
| "loss": 0.2231, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.7948757053530578, |
| "grad_norm": 0.2419845813611781, |
| "learning_rate": 7.545939860503302e-06, |
| "loss": 0.2276, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.7954857404300747, |
| "grad_norm": 0.2376519344621617, |
| "learning_rate": 7.541358764342566e-06, |
| "loss": 0.2195, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.7960957755070917, |
| "grad_norm": 0.24021446019082004, |
| "learning_rate": 7.536774789902246e-06, |
| "loss": 0.2294, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.7967058105841086, |
| "grad_norm": 0.3073505701919987, |
| "learning_rate": 7.532187942374039e-06, |
| "loss": 0.2234, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.7973158456611256, |
| "grad_norm": 0.2504795234075228, |
| "learning_rate": 7.527598226952895e-06, |
| "loss": 0.2426, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.7979258807381424, |
| "grad_norm": 0.25002423317398603, |
| "learning_rate": 7.52300564883701e-06, |
| "loss": 0.2494, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.7985359158151594, |
| "grad_norm": 0.22645304542766165, |
| "learning_rate": 7.518410213227823e-06, |
| "loss": 0.226, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.7991459508921763, |
| "grad_norm": 0.2339205517501626, |
| "learning_rate": 7.51381192533001e-06, |
| "loss": 0.229, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.7997559859691933, |
| "grad_norm": 0.25401845978653675, |
| "learning_rate": 7.50921079035148e-06, |
| "loss": 0.2394, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.8003660210462101, |
| "grad_norm": 0.22318589801172659, |
| "learning_rate": 7.504606813503359e-06, |
| "loss": 0.1975, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.8009760561232271, |
| "grad_norm": 0.23765624448180353, |
| "learning_rate": 7.500000000000001e-06, |
| "loss": 0.2428, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.801586091200244, |
| "grad_norm": 0.22809616997517404, |
| "learning_rate": 7.495390355058965e-06, |
| "loss": 0.2151, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.8021961262772609, |
| "grad_norm": 0.23805330961204915, |
| "learning_rate": 7.490777883901022e-06, |
| "loss": 0.198, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.8028061613542778, |
| "grad_norm": 0.23254499760309993, |
| "learning_rate": 7.486162591750139e-06, |
| "loss": 0.2341, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.8034161964312948, |
| "grad_norm": 0.2368902139011421, |
| "learning_rate": 7.481544483833485e-06, |
| "loss": 0.2471, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.8040262315083118, |
| "grad_norm": 0.23430686145673094, |
| "learning_rate": 7.476923565381409e-06, |
| "loss": 0.2435, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.8046362665853286, |
| "grad_norm": 0.2548091577394907, |
| "learning_rate": 7.472299841627452e-06, |
| "loss": 0.2323, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.8052463016623456, |
| "grad_norm": 0.24438720549939227, |
| "learning_rate": 7.467673317808326e-06, |
| "loss": 0.236, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.8058563367393625, |
| "grad_norm": 0.23249596660173585, |
| "learning_rate": 7.463043999163919e-06, |
| "loss": 0.2241, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.8064663718163795, |
| "grad_norm": 0.2552422967988723, |
| "learning_rate": 7.45841189093728e-06, |
| "loss": 0.2226, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.8070764068933963, |
| "grad_norm": 0.23619599542524092, |
| "learning_rate": 7.4537769983746176e-06, |
| "loss": 0.2368, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.8076864419704133, |
| "grad_norm": 0.2338399745898082, |
| "learning_rate": 7.4491393267253e-06, |
| "loss": 0.2464, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.8082964770474302, |
| "grad_norm": 0.238242750737787, |
| "learning_rate": 7.444498881241835e-06, |
| "loss": 0.2349, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.8089065121244472, |
| "grad_norm": 0.24692516610408854, |
| "learning_rate": 7.439855667179878e-06, |
| "loss": 0.2449, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.809516547201464, |
| "grad_norm": 0.22680401164554698, |
| "learning_rate": 7.435209689798214e-06, |
| "loss": 0.2178, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.810126582278481, |
| "grad_norm": 0.21885722102120825, |
| "learning_rate": 7.430560954358764e-06, |
| "loss": 0.2185, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.810736617355498, |
| "grad_norm": 0.22330802053954865, |
| "learning_rate": 7.4259094661265685e-06, |
| "loss": 0.2227, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.8113466524325149, |
| "grad_norm": 0.2277247401992638, |
| "learning_rate": 7.421255230369789e-06, |
| "loss": 0.2187, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.8119566875095318, |
| "grad_norm": 0.21704734934316533, |
| "learning_rate": 7.4165982523596945e-06, |
| "loss": 0.2145, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.8125667225865487, |
| "grad_norm": 0.23756550362674023, |
| "learning_rate": 7.411938537370665e-06, |
| "loss": 0.2351, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.8131767576635657, |
| "grad_norm": 0.21908133335811206, |
| "learning_rate": 7.407276090680173e-06, |
| "loss": 0.2256, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.8137867927405826, |
| "grad_norm": 0.23818954219879968, |
| "learning_rate": 7.402610917568794e-06, |
| "loss": 0.2353, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.8143968278175995, |
| "grad_norm": 0.22786836784604528, |
| "learning_rate": 7.397943023320185e-06, |
| "loss": 0.2096, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.8150068628946164, |
| "grad_norm": 0.24966631084866364, |
| "learning_rate": 7.393272413221087e-06, |
| "loss": 0.276, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.8156168979716334, |
| "grad_norm": 0.22642197259958527, |
| "learning_rate": 7.388599092561315e-06, |
| "loss": 0.226, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.8162269330486503, |
| "grad_norm": 0.23581467816794235, |
| "learning_rate": 7.383923066633757e-06, |
| "loss": 0.2403, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.8168369681256672, |
| "grad_norm": 0.24104276175337372, |
| "learning_rate": 7.379244340734364e-06, |
| "loss": 0.2392, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.8174470032026842, |
| "grad_norm": 0.2309568139148668, |
| "learning_rate": 7.374562920162143e-06, |
| "loss": 0.2483, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.8180570382797011, |
| "grad_norm": 0.24852331910971381, |
| "learning_rate": 7.369878810219154e-06, |
| "loss": 0.2224, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.818667073356718, |
| "grad_norm": 0.23163239528602403, |
| "learning_rate": 7.365192016210504e-06, |
| "loss": 0.2423, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.8192771084337349, |
| "grad_norm": 0.22613304330564601, |
| "learning_rate": 7.360502543444339e-06, |
| "loss": 0.2288, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.8198871435107519, |
| "grad_norm": 0.2316101029741555, |
| "learning_rate": 7.355810397231839e-06, |
| "loss": 0.2302, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.8204971785877688, |
| "grad_norm": 0.38057976097802504, |
| "learning_rate": 7.351115582887212e-06, |
| "loss": 0.2353, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.8211072136647857, |
| "grad_norm": 0.21811622063757893, |
| "learning_rate": 7.3464181057276864e-06, |
| "loss": 0.2135, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.8217172487418026, |
| "grad_norm": 0.22294497991648154, |
| "learning_rate": 7.341717971073508e-06, |
| "loss": 0.2314, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.8223272838188196, |
| "grad_norm": 0.2198410794160411, |
| "learning_rate": 7.337015184247934e-06, |
| "loss": 0.2286, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.8229373188958365, |
| "grad_norm": 0.23378756412976776, |
| "learning_rate": 7.3323097505772225e-06, |
| "loss": 0.2143, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.8235473539728534, |
| "grad_norm": 0.2472951356627941, |
| "learning_rate": 7.32760167539063e-06, |
| "loss": 0.2393, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.8241573890498703, |
| "grad_norm": 0.22148366770064548, |
| "learning_rate": 7.322890964020404e-06, |
| "loss": 0.2371, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.8247674241268873, |
| "grad_norm": 0.2400413207311959, |
| "learning_rate": 7.318177621801781e-06, |
| "loss": 0.2268, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.8253774592039043, |
| "grad_norm": 0.244478678935058, |
| "learning_rate": 7.313461654072974e-06, |
| "loss": 0.2285, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.8259874942809211, |
| "grad_norm": 0.2424947875863482, |
| "learning_rate": 7.308743066175172e-06, |
| "loss": 0.2486, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.8265975293579381, |
| "grad_norm": 0.24438448325110573, |
| "learning_rate": 7.304021863452525e-06, |
| "loss": 0.2267, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.827207564434955, |
| "grad_norm": 0.23288442093325284, |
| "learning_rate": 7.299298051252155e-06, |
| "loss": 0.2216, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.827817599511972, |
| "grad_norm": 0.26337805688342736, |
| "learning_rate": 7.2945716349241305e-06, |
| "loss": 0.2435, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.8284276345889888, |
| "grad_norm": 0.22968689259833996, |
| "learning_rate": 7.289842619821475e-06, |
| "loss": 0.2263, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.8290376696660058, |
| "grad_norm": 0.2403816832878819, |
| "learning_rate": 7.285111011300153e-06, |
| "loss": 0.2492, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.8296477047430227, |
| "grad_norm": 0.24875377601622348, |
| "learning_rate": 7.280376814719065e-06, |
| "loss": 0.2502, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.8302577398200397, |
| "grad_norm": 0.24312711346639268, |
| "learning_rate": 7.2756400354400445e-06, |
| "loss": 0.2428, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.8308677748970565, |
| "grad_norm": 0.22234901823763067, |
| "learning_rate": 7.2709006788278505e-06, |
| "loss": 0.2126, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.8314778099740735, |
| "grad_norm": 0.23753237391538568, |
| "learning_rate": 7.266158750250158e-06, |
| "loss": 0.2435, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.8320878450510905, |
| "grad_norm": 0.22756154601895012, |
| "learning_rate": 7.261414255077561e-06, |
| "loss": 0.2296, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.8326978801281074, |
| "grad_norm": 0.23538810665024323, |
| "learning_rate": 7.2566671986835515e-06, |
| "loss": 0.232, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.8333079152051243, |
| "grad_norm": 0.2420894512158573, |
| "learning_rate": 7.2519175864445305e-06, |
| "loss": 0.224, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.8339179502821412, |
| "grad_norm": 0.2447468401595778, |
| "learning_rate": 7.24716542373979e-06, |
| "loss": 0.2458, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.8345279853591582, |
| "grad_norm": 0.22222057490321495, |
| "learning_rate": 7.242410715951511e-06, |
| "loss": 0.2421, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.8351380204361751, |
| "grad_norm": 0.22700120289677556, |
| "learning_rate": 7.237653468464756e-06, |
| "loss": 0.1935, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.835748055513192, |
| "grad_norm": 0.24917410581205887, |
| "learning_rate": 7.232893686667466e-06, |
| "loss": 0.2266, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.8363580905902089, |
| "grad_norm": 0.2213826712700349, |
| "learning_rate": 7.22813137595045e-06, |
| "loss": 0.2278, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.8369681256672259, |
| "grad_norm": 0.2150592481787688, |
| "learning_rate": 7.223366541707384e-06, |
| "loss": 0.2234, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.8375781607442427, |
| "grad_norm": 0.24553766658182227, |
| "learning_rate": 7.218599189334799e-06, |
| "loss": 0.2251, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.8381881958212597, |
| "grad_norm": 0.250753725538139, |
| "learning_rate": 7.21382932423208e-06, |
| "loss": 0.2321, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.8387982308982767, |
| "grad_norm": 0.23356843887284268, |
| "learning_rate": 7.209056951801459e-06, |
| "loss": 0.2524, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.8394082659752936, |
| "grad_norm": 0.2405478444046161, |
| "learning_rate": 7.204282077448002e-06, |
| "loss": 0.2302, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.8400183010523105, |
| "grad_norm": 0.24711542527518374, |
| "learning_rate": 7.199504706579617e-06, |
| "loss": 0.225, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.8406283361293274, |
| "grad_norm": 0.2440331913107547, |
| "learning_rate": 7.194724844607033e-06, |
| "loss": 0.2272, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.8412383712063444, |
| "grad_norm": 0.24071419085405785, |
| "learning_rate": 7.189942496943803e-06, |
| "loss": 0.2205, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.8418484062833613, |
| "grad_norm": 0.2241466462729327, |
| "learning_rate": 7.185157669006294e-06, |
| "loss": 0.2469, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.8424584413603782, |
| "grad_norm": 0.2643101109270153, |
| "learning_rate": 7.180370366213684e-06, |
| "loss": 0.23, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.8430684764373951, |
| "grad_norm": 0.24502726261774888, |
| "learning_rate": 7.175580593987952e-06, |
| "loss": 0.2623, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.8436785115144121, |
| "grad_norm": 0.2439187420244412, |
| "learning_rate": 7.170788357753873e-06, |
| "loss": 0.2445, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.844288546591429, |
| "grad_norm": 0.2166186222289749, |
| "learning_rate": 7.165993662939018e-06, |
| "loss": 0.1993, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.8448985816684459, |
| "grad_norm": 0.23837694647294, |
| "learning_rate": 7.161196514973735e-06, |
| "loss": 0.2447, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.8455086167454628, |
| "grad_norm": 0.232204914110554, |
| "learning_rate": 7.1563969192911576e-06, |
| "loss": 0.2278, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.8461186518224798, |
| "grad_norm": 0.24331665799552643, |
| "learning_rate": 7.1515948813271875e-06, |
| "loss": 0.2459, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.8467286868994968, |
| "grad_norm": 0.22854293193671033, |
| "learning_rate": 7.146790406520491e-06, |
| "loss": 0.2589, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.8473387219765136, |
| "grad_norm": 0.2483900851541747, |
| "learning_rate": 7.141983500312498e-06, |
| "loss": 0.2509, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.8479487570535306, |
| "grad_norm": 0.22671118415346048, |
| "learning_rate": 7.137174168147392e-06, |
| "loss": 0.2328, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.8485587921305475, |
| "grad_norm": 0.22736887462102334, |
| "learning_rate": 7.132362415472099e-06, |
| "loss": 0.2428, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.8491688272075645, |
| "grad_norm": 0.23152556848811895, |
| "learning_rate": 7.1275482477362946e-06, |
| "loss": 0.2188, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.8497788622845813, |
| "grad_norm": 0.23959385612766362, |
| "learning_rate": 7.122731670392381e-06, |
| "loss": 0.2403, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.8503888973615983, |
| "grad_norm": 0.23126381072035201, |
| "learning_rate": 7.1179126888954954e-06, |
| "loss": 0.2276, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.8509989324386152, |
| "grad_norm": 0.2408726152963115, |
| "learning_rate": 7.113091308703498e-06, |
| "loss": 0.233, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.8516089675156322, |
| "grad_norm": 0.23586865768989854, |
| "learning_rate": 7.108267535276962e-06, |
| "loss": 0.2342, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.852219002592649, |
| "grad_norm": 0.23510387961362605, |
| "learning_rate": 7.1034413740791705e-06, |
| "loss": 0.2529, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.852829037669666, |
| "grad_norm": 0.22766231088115058, |
| "learning_rate": 7.098612830576118e-06, |
| "loss": 0.2396, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.853439072746683, |
| "grad_norm": 0.23117358845079555, |
| "learning_rate": 7.09378191023649e-06, |
| "loss": 0.2501, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.8540491078236999, |
| "grad_norm": 0.24728580197582728, |
| "learning_rate": 7.088948618531668e-06, |
| "loss": 0.2496, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.8546591429007168, |
| "grad_norm": 0.239872576193232, |
| "learning_rate": 7.0841129609357165e-06, |
| "loss": 0.21, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.8552691779777337, |
| "grad_norm": 0.21627847504408382, |
| "learning_rate": 7.0792749429253785e-06, |
| "loss": 0.2115, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.8558792130547507, |
| "grad_norm": 0.23893267429014625, |
| "learning_rate": 7.0744345699800755e-06, |
| "loss": 0.2397, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.8564892481317675, |
| "grad_norm": 0.23226050538125845, |
| "learning_rate": 7.069591847581894e-06, |
| "loss": 0.2274, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.8570992832087845, |
| "grad_norm": 0.34291310562756117, |
| "learning_rate": 7.064746781215578e-06, |
| "loss": 0.2378, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.8577093182858014, |
| "grad_norm": 0.2330305946371675, |
| "learning_rate": 7.059899376368531e-06, |
| "loss": 0.2393, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.8583193533628184, |
| "grad_norm": 0.23466165031403366, |
| "learning_rate": 7.0550496385308e-06, |
| "loss": 0.2286, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.8589293884398352, |
| "grad_norm": 0.21108347674892095, |
| "learning_rate": 7.050197573195081e-06, |
| "loss": 0.2238, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.8595394235168522, |
| "grad_norm": 0.2119361948395696, |
| "learning_rate": 7.045343185856701e-06, |
| "loss": 0.2187, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.8601494585938692, |
| "grad_norm": 0.2288603535650092, |
| "learning_rate": 7.0404864820136165e-06, |
| "loss": 0.2311, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.8607594936708861, |
| "grad_norm": 0.2294300887569088, |
| "learning_rate": 7.03562746716641e-06, |
| "loss": 0.2376, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.861369528747903, |
| "grad_norm": 0.22628168525352094, |
| "learning_rate": 7.030766146818281e-06, |
| "loss": 0.2425, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.8619795638249199, |
| "grad_norm": 0.23004126887608609, |
| "learning_rate": 7.025902526475039e-06, |
| "loss": 0.2351, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.8625895989019369, |
| "grad_norm": 0.2156346960003979, |
| "learning_rate": 7.0210366116451e-06, |
| "loss": 0.2184, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.8631996339789538, |
| "grad_norm": 0.23633568097737323, |
| "learning_rate": 7.016168407839478e-06, |
| "loss": 0.2421, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.8638096690559707, |
| "grad_norm": 0.7032365965820666, |
| "learning_rate": 7.011297920571779e-06, |
| "loss": 0.22, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.8644197041329876, |
| "grad_norm": 0.22139560165332492, |
| "learning_rate": 7.006425155358195e-06, |
| "loss": 0.2411, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.8650297392100046, |
| "grad_norm": 0.22921141024552555, |
| "learning_rate": 7.001550117717499e-06, |
| "loss": 0.2243, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.8656397742870215, |
| "grad_norm": 0.2148756902424555, |
| "learning_rate": 6.996672813171036e-06, |
| "loss": 0.2191, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.8662498093640384, |
| "grad_norm": 0.24364684199653186, |
| "learning_rate": 6.991793247242719e-06, |
| "loss": 0.2504, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.8668598444410553, |
| "grad_norm": 0.22724841878361154, |
| "learning_rate": 6.986911425459028e-06, |
| "loss": 0.2158, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.8674698795180723, |
| "grad_norm": 0.2298061255709973, |
| "learning_rate": 6.982027353348985e-06, |
| "loss": 0.2276, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.8680799145950893, |
| "grad_norm": 0.24823382304574634, |
| "learning_rate": 6.977141036444174e-06, |
| "loss": 0.2608, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.8686899496721061, |
| "grad_norm": 0.23330840784018506, |
| "learning_rate": 6.9722524802787125e-06, |
| "loss": 0.2384, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.8692999847491231, |
| "grad_norm": 0.23031863340182507, |
| "learning_rate": 6.967361690389258e-06, |
| "loss": 0.2384, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.86991001982614, |
| "grad_norm": 0.23343096638678282, |
| "learning_rate": 6.962468672314999e-06, |
| "loss": 0.2247, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.870520054903157, |
| "grad_norm": 0.23412336450252114, |
| "learning_rate": 6.957573431597646e-06, |
| "loss": 0.2439, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.8711300899801738, |
| "grad_norm": 0.21705008516947805, |
| "learning_rate": 6.952675973781423e-06, |
| "loss": 0.2289, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.8717401250571908, |
| "grad_norm": 0.23462263092212576, |
| "learning_rate": 6.947776304413072e-06, |
| "loss": 0.2386, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.8723501601342077, |
| "grad_norm": 0.22881311935709336, |
| "learning_rate": 6.942874429041834e-06, |
| "loss": 0.2257, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.8729601952112247, |
| "grad_norm": 0.2165771690749278, |
| "learning_rate": 6.937970353219453e-06, |
| "loss": 0.2272, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.8735702302882415, |
| "grad_norm": 0.21766263672481576, |
| "learning_rate": 6.933064082500161e-06, |
| "loss": 0.227, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.8741802653652585, |
| "grad_norm": 0.23341818589676822, |
| "learning_rate": 6.92815562244068e-06, |
| "loss": 0.2266, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.8747903004422755, |
| "grad_norm": 0.23091764664489853, |
| "learning_rate": 6.923244978600206e-06, |
| "loss": 0.2372, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.8754003355192923, |
| "grad_norm": 0.22688425972610404, |
| "learning_rate": 6.918332156540417e-06, |
| "loss": 0.2426, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.8760103705963093, |
| "grad_norm": 0.2670114310566748, |
| "learning_rate": 6.913417161825449e-06, |
| "loss": 0.2505, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.8766204056733262, |
| "grad_norm": 0.22422672515482298, |
| "learning_rate": 6.908500000021905e-06, |
| "loss": 0.2322, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.8772304407503432, |
| "grad_norm": 0.24003057489356389, |
| "learning_rate": 6.903580676698838e-06, |
| "loss": 0.25, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.87784047582736, |
| "grad_norm": 0.2317200045289886, |
| "learning_rate": 6.898659197427748e-06, |
| "loss": 0.2423, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.878450510904377, |
| "grad_norm": 0.22708239407650227, |
| "learning_rate": 6.893735567782587e-06, |
| "loss": 0.2427, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.8790605459813939, |
| "grad_norm": 0.2366178727370385, |
| "learning_rate": 6.888809793339729e-06, |
| "loss": 0.2324, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.8796705810584109, |
| "grad_norm": 0.2376731316602752, |
| "learning_rate": 6.8838818796779875e-06, |
| "loss": 0.2234, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.8802806161354277, |
| "grad_norm": 0.24035037512939267, |
| "learning_rate": 6.878951832378591e-06, |
| "loss": 0.2295, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.8808906512124447, |
| "grad_norm": 0.21746369005203028, |
| "learning_rate": 6.87401965702519e-06, |
| "loss": 0.2382, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.8815006862894617, |
| "grad_norm": 0.23319099616460107, |
| "learning_rate": 6.869085359203844e-06, |
| "loss": 0.2195, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.8821107213664786, |
| "grad_norm": 0.2382960087546005, |
| "learning_rate": 6.864148944503016e-06, |
| "loss": 0.2263, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.8827207564434955, |
| "grad_norm": 0.2311147461646032, |
| "learning_rate": 6.859210418513564e-06, |
| "loss": 0.2364, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.8833307915205124, |
| "grad_norm": 0.2363109991798035, |
| "learning_rate": 6.854269786828741e-06, |
| "loss": 0.2346, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.8839408265975294, |
| "grad_norm": 0.22484383826058832, |
| "learning_rate": 6.849327055044182e-06, |
| "loss": 0.2165, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.8845508616745463, |
| "grad_norm": 0.22793442853232132, |
| "learning_rate": 6.844382228757904e-06, |
| "loss": 0.2263, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.8851608967515632, |
| "grad_norm": 0.2133071411846939, |
| "learning_rate": 6.839435313570293e-06, |
| "loss": 0.2143, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.8857709318285801, |
| "grad_norm": 0.23989669045168097, |
| "learning_rate": 6.834486315084101e-06, |
| "loss": 0.2279, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.8863809669055971, |
| "grad_norm": 0.28193635749388085, |
| "learning_rate": 6.82953523890444e-06, |
| "loss": 0.2474, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.886991001982614, |
| "grad_norm": 0.21892568723573827, |
| "learning_rate": 6.824582090638777e-06, |
| "loss": 0.236, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.8876010370596309, |
| "grad_norm": 0.228144663125668, |
| "learning_rate": 6.819626875896924e-06, |
| "loss": 0.2194, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.8882110721366479, |
| "grad_norm": 0.2336697639430183, |
| "learning_rate": 6.814669600291033e-06, |
| "loss": 0.2244, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.8888211072136648, |
| "grad_norm": 0.23064852314498838, |
| "learning_rate": 6.80971026943559e-06, |
| "loss": 0.2359, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.8894311422906818, |
| "grad_norm": 0.21840239732400749, |
| "learning_rate": 6.804748888947412e-06, |
| "loss": 0.2246, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.8900411773676986, |
| "grad_norm": 0.2348987589111878, |
| "learning_rate": 6.799785464445633e-06, |
| "loss": 0.2491, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.8906512124447156, |
| "grad_norm": 0.2428043433600667, |
| "learning_rate": 6.794820001551703e-06, |
| "loss": 0.2621, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.8912612475217325, |
| "grad_norm": 0.22983238516221302, |
| "learning_rate": 6.789852505889384e-06, |
| "loss": 0.2426, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.8918712825987495, |
| "grad_norm": 0.23325969571323438, |
| "learning_rate": 6.784882983084735e-06, |
| "loss": 0.2225, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.8924813176757663, |
| "grad_norm": 0.21727207523138228, |
| "learning_rate": 6.779911438766117e-06, |
| "loss": 0.211, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.8930913527527833, |
| "grad_norm": 0.21790563597926196, |
| "learning_rate": 6.774937878564175e-06, |
| "loss": 0.2296, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.8937013878298002, |
| "grad_norm": 0.23237825441684618, |
| "learning_rate": 6.769962308111839e-06, |
| "loss": 0.2386, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.8943114229068171, |
| "grad_norm": 0.2245292716074995, |
| "learning_rate": 6.764984733044316e-06, |
| "loss": 0.2427, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.894921457983834, |
| "grad_norm": 0.23316404802575824, |
| "learning_rate": 6.7600051589990855e-06, |
| "loss": 0.237, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.895531493060851, |
| "grad_norm": 0.23682470929517443, |
| "learning_rate": 6.755023591615887e-06, |
| "loss": 0.2473, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.896141528137868, |
| "grad_norm": 0.2289751975303715, |
| "learning_rate": 6.750040036536718e-06, |
| "loss": 0.2436, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.8967515632148848, |
| "grad_norm": 0.2410302502040547, |
| "learning_rate": 6.745054499405833e-06, |
| "loss": 0.2363, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.8973615982919018, |
| "grad_norm": 0.23665105798930544, |
| "learning_rate": 6.740066985869724e-06, |
| "loss": 0.2328, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.8979716333689187, |
| "grad_norm": 0.268089173782497, |
| "learning_rate": 6.735077501577126e-06, |
| "loss": 0.2226, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.8985816684459357, |
| "grad_norm": 0.23567208315168722, |
| "learning_rate": 6.7300860521790034e-06, |
| "loss": 0.2443, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.8991917035229525, |
| "grad_norm": 0.22552131923653507, |
| "learning_rate": 6.725092643328548e-06, |
| "loss": 0.2373, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.8998017385999695, |
| "grad_norm": 0.21706492744114989, |
| "learning_rate": 6.72009728068117e-06, |
| "loss": 0.225, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.9004117736769864, |
| "grad_norm": 0.2327906203614174, |
| "learning_rate": 6.715099969894492e-06, |
| "loss": 0.2326, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.9010218087540034, |
| "grad_norm": 0.23253407140894464, |
| "learning_rate": 6.710100716628345e-06, |
| "loss": 0.226, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.9016318438310202, |
| "grad_norm": 0.22830354557774632, |
| "learning_rate": 6.705099526544757e-06, |
| "loss": 0.2539, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.9022418789080372, |
| "grad_norm": 0.24215917120267302, |
| "learning_rate": 6.700096405307952e-06, |
| "loss": 0.2487, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.9028519139850542, |
| "grad_norm": 0.24211803786412073, |
| "learning_rate": 6.69509135858434e-06, |
| "loss": 0.2492, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.9034619490620711, |
| "grad_norm": 0.24812360058343688, |
| "learning_rate": 6.690084392042514e-06, |
| "loss": 0.2219, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.904071984139088, |
| "grad_norm": 0.21262275648517254, |
| "learning_rate": 6.6850755113532385e-06, |
| "loss": 0.2234, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.9046820192161049, |
| "grad_norm": 0.23237360487836642, |
| "learning_rate": 6.680064722189445e-06, |
| "loss": 0.2312, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.9052920542931219, |
| "grad_norm": 0.22699230989997096, |
| "learning_rate": 6.675052030226231e-06, |
| "loss": 0.2332, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.9059020893701388, |
| "grad_norm": 0.2430314059683205, |
| "learning_rate": 6.670037441140844e-06, |
| "loss": 0.2449, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.9065121244471557, |
| "grad_norm": 0.22098823674287454, |
| "learning_rate": 6.665020960612685e-06, |
| "loss": 0.2344, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.9071221595241726, |
| "grad_norm": 0.24069656974852452, |
| "learning_rate": 6.6600025943232935e-06, |
| "loss": 0.2579, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.9077321946011896, |
| "grad_norm": 0.22598312198368797, |
| "learning_rate": 6.6549823479563445e-06, |
| "loss": 0.2487, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.9083422296782065, |
| "grad_norm": 0.22896335937970488, |
| "learning_rate": 6.649960227197648e-06, |
| "loss": 0.2288, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.9089522647552234, |
| "grad_norm": 0.23738911819834904, |
| "learning_rate": 6.644936237735129e-06, |
| "loss": 0.2515, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.9095622998322404, |
| "grad_norm": 0.2185717569808725, |
| "learning_rate": 6.639910385258834e-06, |
| "loss": 0.2392, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.9101723349092573, |
| "grad_norm": 0.2321320019483118, |
| "learning_rate": 6.6348826754609195e-06, |
| "loss": 0.2603, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.9107823699862743, |
| "grad_norm": 0.22568275472973856, |
| "learning_rate": 6.629853114035643e-06, |
| "loss": 0.2394, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.9113924050632911, |
| "grad_norm": 0.21026955772320566, |
| "learning_rate": 6.62482170667936e-06, |
| "loss": 0.2264, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.9120024401403081, |
| "grad_norm": 0.23213943301628961, |
| "learning_rate": 6.619788459090517e-06, |
| "loss": 0.2349, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.912612475217325, |
| "grad_norm": 0.22915949764604762, |
| "learning_rate": 6.614753376969647e-06, |
| "loss": 0.2295, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.9132225102943419, |
| "grad_norm": 0.23372792813000748, |
| "learning_rate": 6.609716466019356e-06, |
| "loss": 0.2209, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.9138325453713588, |
| "grad_norm": 0.2309167856093707, |
| "learning_rate": 6.604677731944323e-06, |
| "loss": 0.2267, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.9144425804483758, |
| "grad_norm": 0.2148406989040156, |
| "learning_rate": 6.599637180451295e-06, |
| "loss": 0.2114, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.9150526155253927, |
| "grad_norm": 0.2189166439537073, |
| "learning_rate": 6.594594817249075e-06, |
| "loss": 0.24, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.9150526155253927, |
| "eval_loss": 0.22871258854866028, |
| "eval_runtime": 265.6371, |
| "eval_samples_per_second": 4.028, |
| "eval_steps_per_second": 0.128, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.9156626506024096, |
| "grad_norm": 0.2519964998379413, |
| "learning_rate": 6.589550648048517e-06, |
| "loss": 0.2157, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.9162726856794265, |
| "grad_norm": 0.2513906292659038, |
| "learning_rate": 6.584504678562521e-06, |
| "loss": 0.2317, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.9168827207564435, |
| "grad_norm": 0.23399050870483937, |
| "learning_rate": 6.5794569145060264e-06, |
| "loss": 0.2383, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.9174927558334605, |
| "grad_norm": 0.2498121509819766, |
| "learning_rate": 6.574407361596005e-06, |
| "loss": 0.2305, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.9181027909104773, |
| "grad_norm": 0.2493812842752683, |
| "learning_rate": 6.569356025551454e-06, |
| "loss": 0.23, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.9187128259874943, |
| "grad_norm": 0.22780378406134064, |
| "learning_rate": 6.564302912093393e-06, |
| "loss": 0.2258, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.9193228610645112, |
| "grad_norm": 0.2325339328201163, |
| "learning_rate": 6.5592480269448485e-06, |
| "loss": 0.255, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.9199328961415282, |
| "grad_norm": 0.23399882400423566, |
| "learning_rate": 6.554191375830861e-06, |
| "loss": 0.2258, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.920542931218545, |
| "grad_norm": 0.22460401789572665, |
| "learning_rate": 6.5491329644784655e-06, |
| "loss": 0.214, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.921152966295562, |
| "grad_norm": 0.23146171330320892, |
| "learning_rate": 6.544072798616694e-06, |
| "loss": 0.2291, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.9217630013725789, |
| "grad_norm": 0.24084505508421777, |
| "learning_rate": 6.539010883976562e-06, |
| "loss": 0.2497, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.9223730364495959, |
| "grad_norm": 0.22304395266301494, |
| "learning_rate": 6.53394722629107e-06, |
| "loss": 0.2187, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.9229830715266127, |
| "grad_norm": 0.2387792130084646, |
| "learning_rate": 6.5288818312951886e-06, |
| "loss": 0.2357, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.9235931066036297, |
| "grad_norm": 0.2401822676734899, |
| "learning_rate": 6.523814704725861e-06, |
| "loss": 0.2242, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.9242031416806467, |
| "grad_norm": 0.23445665401381172, |
| "learning_rate": 6.518745852321985e-06, |
| "loss": 0.2237, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.9248131767576636, |
| "grad_norm": 0.2284841709830546, |
| "learning_rate": 6.513675279824416e-06, |
| "loss": 0.2267, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.9254232118346805, |
| "grad_norm": 0.2128083194347949, |
| "learning_rate": 6.508602992975963e-06, |
| "loss": 0.2048, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.9260332469116974, |
| "grad_norm": 0.2175291929915765, |
| "learning_rate": 6.503528997521365e-06, |
| "loss": 0.2317, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.9266432819887144, |
| "grad_norm": 0.21551752748043324, |
| "learning_rate": 6.4984532992073094e-06, |
| "loss": 0.216, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.9272533170657313, |
| "grad_norm": 0.22973453408383288, |
| "learning_rate": 6.493375903782402e-06, |
| "loss": 0.2357, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.9278633521427482, |
| "grad_norm": 0.23097559158343273, |
| "learning_rate": 6.4882968169971734e-06, |
| "loss": 0.2372, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.9284733872197651, |
| "grad_norm": 0.22636389176483931, |
| "learning_rate": 6.483216044604073e-06, |
| "loss": 0.213, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.9290834222967821, |
| "grad_norm": 0.24225414024833547, |
| "learning_rate": 6.478133592357455e-06, |
| "loss": 0.241, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.929693457373799, |
| "grad_norm": 0.22840372865504438, |
| "learning_rate": 6.473049466013579e-06, |
| "loss": 0.2399, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.9303034924508159, |
| "grad_norm": 0.21765688745780767, |
| "learning_rate": 6.467963671330602e-06, |
| "loss": 0.2231, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.9309135275278329, |
| "grad_norm": 0.22555043157288188, |
| "learning_rate": 6.462876214068563e-06, |
| "loss": 0.222, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.9315235626048498, |
| "grad_norm": 0.22859166767599118, |
| "learning_rate": 6.457787099989392e-06, |
| "loss": 0.2102, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.9321335976818667, |
| "grad_norm": 0.23530311284338487, |
| "learning_rate": 6.452696334856895e-06, |
| "loss": 0.2484, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.9327436327588836, |
| "grad_norm": 0.2146376266487681, |
| "learning_rate": 6.447603924436744e-06, |
| "loss": 0.2119, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.9333536678359006, |
| "grad_norm": 0.2283231397865005, |
| "learning_rate": 6.442509874496475e-06, |
| "loss": 0.2297, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.9339637029129175, |
| "grad_norm": 0.21933733150433, |
| "learning_rate": 6.437414190805482e-06, |
| "loss": 0.2268, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.9345737379899344, |
| "grad_norm": 0.22898995559601057, |
| "learning_rate": 6.432316879135012e-06, |
| "loss": 0.2293, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.9351837730669513, |
| "grad_norm": 0.23928333789077255, |
| "learning_rate": 6.4272179452581505e-06, |
| "loss": 0.2358, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.9357938081439683, |
| "grad_norm": 0.22245443866611708, |
| "learning_rate": 6.422117394949824e-06, |
| "loss": 0.2196, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.9364038432209852, |
| "grad_norm": 0.2271006986531639, |
| "learning_rate": 6.417015233986786e-06, |
| "loss": 0.2434, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.9370138782980021, |
| "grad_norm": 0.2592277983360455, |
| "learning_rate": 6.41191146814762e-06, |
| "loss": 0.2214, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.937623913375019, |
| "grad_norm": 0.228391871107343, |
| "learning_rate": 6.406806103212725e-06, |
| "loss": 0.2122, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.938233948452036, |
| "grad_norm": 0.21874020313662693, |
| "learning_rate": 6.401699144964306e-06, |
| "loss": 0.2196, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.938843983529053, |
| "grad_norm": 0.23244284042470695, |
| "learning_rate": 6.3965905991863805e-06, |
| "loss": 0.233, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.9394540186060698, |
| "grad_norm": 0.22457758396597863, |
| "learning_rate": 6.391480471664757e-06, |
| "loss": 0.2362, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.9400640536830868, |
| "grad_norm": 0.2336482299735738, |
| "learning_rate": 6.38636876818704e-06, |
| "loss": 0.2326, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.9406740887601037, |
| "grad_norm": 0.2189721244318323, |
| "learning_rate": 6.381255494542618e-06, |
| "loss": 0.2167, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.9412841238371207, |
| "grad_norm": 0.2994373279270973, |
| "learning_rate": 6.376140656522656e-06, |
| "loss": 0.2428, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.9418941589141375, |
| "grad_norm": 0.33778356800748166, |
| "learning_rate": 6.371024259920091e-06, |
| "loss": 0.2371, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.9425041939911545, |
| "grad_norm": 0.21829401941414528, |
| "learning_rate": 6.365906310529631e-06, |
| "loss": 0.2361, |
| "step": 1545 |
| }, |
| { |
| "epoch": 0.9431142290681714, |
| "grad_norm": 0.23289317417704491, |
| "learning_rate": 6.360786814147731e-06, |
| "loss": 0.2453, |
| "step": 1546 |
| }, |
| { |
| "epoch": 0.9437242641451884, |
| "grad_norm": 0.25579141903640873, |
| "learning_rate": 6.3556657765726116e-06, |
| "loss": 0.221, |
| "step": 1547 |
| }, |
| { |
| "epoch": 0.9443342992222052, |
| "grad_norm": 0.23020496289050363, |
| "learning_rate": 6.350543203604228e-06, |
| "loss": 0.2244, |
| "step": 1548 |
| }, |
| { |
| "epoch": 0.9449443342992222, |
| "grad_norm": 0.2211248442137757, |
| "learning_rate": 6.345419101044281e-06, |
| "loss": 0.2175, |
| "step": 1549 |
| }, |
| { |
| "epoch": 0.9455543693762392, |
| "grad_norm": 0.22858010072311402, |
| "learning_rate": 6.340293474696198e-06, |
| "loss": 0.2345, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.9461644044532561, |
| "grad_norm": 0.2374051390192958, |
| "learning_rate": 6.335166330365141e-06, |
| "loss": 0.2223, |
| "step": 1551 |
| }, |
| { |
| "epoch": 0.946774439530273, |
| "grad_norm": 0.22322603988882175, |
| "learning_rate": 6.330037673857982e-06, |
| "loss": 0.2309, |
| "step": 1552 |
| }, |
| { |
| "epoch": 0.9473844746072899, |
| "grad_norm": 0.23828346993481017, |
| "learning_rate": 6.32490751098331e-06, |
| "loss": 0.2245, |
| "step": 1553 |
| }, |
| { |
| "epoch": 0.9479945096843069, |
| "grad_norm": 0.25786837395107554, |
| "learning_rate": 6.3197758475514195e-06, |
| "loss": 0.2414, |
| "step": 1554 |
| }, |
| { |
| "epoch": 0.9486045447613238, |
| "grad_norm": 0.21358766242427954, |
| "learning_rate": 6.314642689374304e-06, |
| "loss": 0.2082, |
| "step": 1555 |
| }, |
| { |
| "epoch": 0.9492145798383407, |
| "grad_norm": 0.20849750694020697, |
| "learning_rate": 6.309508042265654e-06, |
| "loss": 0.2007, |
| "step": 1556 |
| }, |
| { |
| "epoch": 0.9498246149153576, |
| "grad_norm": 0.21895390787981686, |
| "learning_rate": 6.30437191204084e-06, |
| "loss": 0.2209, |
| "step": 1557 |
| }, |
| { |
| "epoch": 0.9504346499923746, |
| "grad_norm": 0.24064011815601305, |
| "learning_rate": 6.299234304516914e-06, |
| "loss": 0.2374, |
| "step": 1558 |
| }, |
| { |
| "epoch": 0.9510446850693914, |
| "grad_norm": 0.22325511306252963, |
| "learning_rate": 6.294095225512604e-06, |
| "loss": 0.2308, |
| "step": 1559 |
| }, |
| { |
| "epoch": 0.9516547201464084, |
| "grad_norm": 0.23327277715190625, |
| "learning_rate": 6.288954680848303e-06, |
| "loss": 0.2352, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.9522647552234254, |
| "grad_norm": 0.22259263112982883, |
| "learning_rate": 6.2838126763460635e-06, |
| "loss": 0.223, |
| "step": 1561 |
| }, |
| { |
| "epoch": 0.9528747903004423, |
| "grad_norm": 0.22604430426038336, |
| "learning_rate": 6.27866921782959e-06, |
| "loss": 0.2166, |
| "step": 1562 |
| }, |
| { |
| "epoch": 0.9534848253774592, |
| "grad_norm": 0.21851943667633414, |
| "learning_rate": 6.273524311124237e-06, |
| "loss": 0.2073, |
| "step": 1563 |
| }, |
| { |
| "epoch": 0.9540948604544761, |
| "grad_norm": 0.2548954994078049, |
| "learning_rate": 6.268377962056999e-06, |
| "loss": 0.2328, |
| "step": 1564 |
| }, |
| { |
| "epoch": 0.9547048955314931, |
| "grad_norm": 0.23804348920601198, |
| "learning_rate": 6.263230176456497e-06, |
| "loss": 0.2146, |
| "step": 1565 |
| }, |
| { |
| "epoch": 0.95531493060851, |
| "grad_norm": 0.2266315320160639, |
| "learning_rate": 6.258080960152991e-06, |
| "loss": 0.2181, |
| "step": 1566 |
| }, |
| { |
| "epoch": 0.9559249656855269, |
| "grad_norm": 0.23542998803866033, |
| "learning_rate": 6.252930318978353e-06, |
| "loss": 0.2385, |
| "step": 1567 |
| }, |
| { |
| "epoch": 0.9565350007625438, |
| "grad_norm": 0.21317840339021937, |
| "learning_rate": 6.247778258766069e-06, |
| "loss": 0.212, |
| "step": 1568 |
| }, |
| { |
| "epoch": 0.9571450358395608, |
| "grad_norm": 0.2355547595678835, |
| "learning_rate": 6.2426247853512355e-06, |
| "loss": 0.2493, |
| "step": 1569 |
| }, |
| { |
| "epoch": 0.9577550709165777, |
| "grad_norm": 0.2365566242506874, |
| "learning_rate": 6.237469904570549e-06, |
| "loss": 0.2394, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.9583651059935946, |
| "grad_norm": 0.2351549924941449, |
| "learning_rate": 6.232313622262297e-06, |
| "loss": 0.2349, |
| "step": 1571 |
| }, |
| { |
| "epoch": 0.9589751410706115, |
| "grad_norm": 0.2153592643562907, |
| "learning_rate": 6.227155944266358e-06, |
| "loss": 0.2272, |
| "step": 1572 |
| }, |
| { |
| "epoch": 0.9595851761476285, |
| "grad_norm": 0.22869892531056712, |
| "learning_rate": 6.221996876424186e-06, |
| "loss": 0.249, |
| "step": 1573 |
| }, |
| { |
| "epoch": 0.9601952112246455, |
| "grad_norm": 0.23218107560341958, |
| "learning_rate": 6.216836424578816e-06, |
| "loss": 0.2243, |
| "step": 1574 |
| }, |
| { |
| "epoch": 0.9608052463016623, |
| "grad_norm": 0.23492027645928484, |
| "learning_rate": 6.211674594574847e-06, |
| "loss": 0.207, |
| "step": 1575 |
| }, |
| { |
| "epoch": 0.9614152813786793, |
| "grad_norm": 0.22911404127241716, |
| "learning_rate": 6.206511392258439e-06, |
| "loss": 0.229, |
| "step": 1576 |
| }, |
| { |
| "epoch": 0.9620253164556962, |
| "grad_norm": 0.2479077538171469, |
| "learning_rate": 6.2013468234773034e-06, |
| "loss": 0.2342, |
| "step": 1577 |
| }, |
| { |
| "epoch": 0.9626353515327132, |
| "grad_norm": 0.24617532460859282, |
| "learning_rate": 6.196180894080707e-06, |
| "loss": 0.2491, |
| "step": 1578 |
| }, |
| { |
| "epoch": 0.96324538660973, |
| "grad_norm": 0.22360258704509595, |
| "learning_rate": 6.191013609919448e-06, |
| "loss": 0.2256, |
| "step": 1579 |
| }, |
| { |
| "epoch": 0.963855421686747, |
| "grad_norm": 0.22912652157745447, |
| "learning_rate": 6.185844976845866e-06, |
| "loss": 0.2266, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.9644654567637639, |
| "grad_norm": 0.23041424403533584, |
| "learning_rate": 6.180675000713825e-06, |
| "loss": 0.2244, |
| "step": 1581 |
| }, |
| { |
| "epoch": 0.9650754918407809, |
| "grad_norm": 0.3259350286108923, |
| "learning_rate": 6.175503687378711e-06, |
| "loss": 0.245, |
| "step": 1582 |
| }, |
| { |
| "epoch": 0.9656855269177977, |
| "grad_norm": 0.2213882091045891, |
| "learning_rate": 6.170331042697425e-06, |
| "loss": 0.2309, |
| "step": 1583 |
| }, |
| { |
| "epoch": 0.9662955619948147, |
| "grad_norm": 0.2309385259789968, |
| "learning_rate": 6.1651570725283774e-06, |
| "loss": 0.2197, |
| "step": 1584 |
| }, |
| { |
| "epoch": 0.9669055970718317, |
| "grad_norm": 0.2276259703395926, |
| "learning_rate": 6.1599817827314744e-06, |
| "loss": 0.2468, |
| "step": 1585 |
| }, |
| { |
| "epoch": 0.9675156321488485, |
| "grad_norm": 0.2173855057127278, |
| "learning_rate": 6.154805179168122e-06, |
| "loss": 0.2299, |
| "step": 1586 |
| }, |
| { |
| "epoch": 0.9681256672258655, |
| "grad_norm": 0.22955242541686519, |
| "learning_rate": 6.149627267701212e-06, |
| "loss": 0.2314, |
| "step": 1587 |
| }, |
| { |
| "epoch": 0.9687357023028824, |
| "grad_norm": 0.24358603947580434, |
| "learning_rate": 6.144448054195119e-06, |
| "loss": 0.2214, |
| "step": 1588 |
| }, |
| { |
| "epoch": 0.9693457373798994, |
| "grad_norm": 0.22127455539624566, |
| "learning_rate": 6.139267544515689e-06, |
| "loss": 0.2297, |
| "step": 1589 |
| }, |
| { |
| "epoch": 0.9699557724569162, |
| "grad_norm": 0.24106981499557115, |
| "learning_rate": 6.1340857445302395e-06, |
| "loss": 0.227, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.9705658075339332, |
| "grad_norm": 0.22531162899583024, |
| "learning_rate": 6.128902660107547e-06, |
| "loss": 0.23, |
| "step": 1591 |
| }, |
| { |
| "epoch": 0.9711758426109501, |
| "grad_norm": 0.21576028626875046, |
| "learning_rate": 6.123718297117844e-06, |
| "loss": 0.2182, |
| "step": 1592 |
| }, |
| { |
| "epoch": 0.9717858776879671, |
| "grad_norm": 0.22383451511812855, |
| "learning_rate": 6.118532661432812e-06, |
| "loss": 0.2192, |
| "step": 1593 |
| }, |
| { |
| "epoch": 0.9723959127649839, |
| "grad_norm": 0.22162444020782598, |
| "learning_rate": 6.113345758925572e-06, |
| "loss": 0.2337, |
| "step": 1594 |
| }, |
| { |
| "epoch": 0.9730059478420009, |
| "grad_norm": 0.23306688141707715, |
| "learning_rate": 6.10815759547068e-06, |
| "loss": 0.2327, |
| "step": 1595 |
| }, |
| { |
| "epoch": 0.9736159829190179, |
| "grad_norm": 0.24668096163914657, |
| "learning_rate": 6.1029681769441195e-06, |
| "loss": 0.2362, |
| "step": 1596 |
| }, |
| { |
| "epoch": 0.9742260179960348, |
| "grad_norm": 0.22771560133975774, |
| "learning_rate": 6.097777509223299e-06, |
| "loss": 0.2181, |
| "step": 1597 |
| }, |
| { |
| "epoch": 0.9748360530730517, |
| "grad_norm": 0.227152471220962, |
| "learning_rate": 6.09258559818704e-06, |
| "loss": 0.228, |
| "step": 1598 |
| }, |
| { |
| "epoch": 0.9754460881500686, |
| "grad_norm": 0.24505550426884296, |
| "learning_rate": 6.087392449715568e-06, |
| "loss": 0.2365, |
| "step": 1599 |
| }, |
| { |
| "epoch": 0.9760561232270856, |
| "grad_norm": 0.2594678512272388, |
| "learning_rate": 6.0821980696905145e-06, |
| "loss": 0.2347, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.9766661583041025, |
| "grad_norm": 0.22648860453973974, |
| "learning_rate": 6.077002463994908e-06, |
| "loss": 0.2292, |
| "step": 1601 |
| }, |
| { |
| "epoch": 0.9772761933811194, |
| "grad_norm": 0.217024573137216, |
| "learning_rate": 6.0718056385131575e-06, |
| "loss": 0.2097, |
| "step": 1602 |
| }, |
| { |
| "epoch": 0.9778862284581363, |
| "grad_norm": 0.23583942188243961, |
| "learning_rate": 6.066607599131061e-06, |
| "loss": 0.2061, |
| "step": 1603 |
| }, |
| { |
| "epoch": 0.9784962635351533, |
| "grad_norm": 0.22690683270385684, |
| "learning_rate": 6.061408351735787e-06, |
| "loss": 0.2376, |
| "step": 1604 |
| }, |
| { |
| "epoch": 0.9791062986121702, |
| "grad_norm": 0.22111549660672086, |
| "learning_rate": 6.056207902215874e-06, |
| "loss": 0.2213, |
| "step": 1605 |
| }, |
| { |
| "epoch": 0.9797163336891871, |
| "grad_norm": 0.26246371397002916, |
| "learning_rate": 6.051006256461221e-06, |
| "loss": 0.2174, |
| "step": 1606 |
| }, |
| { |
| "epoch": 0.980326368766204, |
| "grad_norm": 0.22829641873217374, |
| "learning_rate": 6.045803420363085e-06, |
| "loss": 0.2238, |
| "step": 1607 |
| }, |
| { |
| "epoch": 0.980936403843221, |
| "grad_norm": 0.22819806706388343, |
| "learning_rate": 6.040599399814064e-06, |
| "loss": 0.2078, |
| "step": 1608 |
| }, |
| { |
| "epoch": 0.981546438920238, |
| "grad_norm": 0.23728526726592925, |
| "learning_rate": 6.035394200708104e-06, |
| "loss": 0.2246, |
| "step": 1609 |
| }, |
| { |
| "epoch": 0.9821564739972548, |
| "grad_norm": 0.22654727333712824, |
| "learning_rate": 6.030187828940485e-06, |
| "loss": 0.2085, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.9827665090742718, |
| "grad_norm": 0.2181113150986954, |
| "learning_rate": 6.024980290407813e-06, |
| "loss": 0.2051, |
| "step": 1611 |
| }, |
| { |
| "epoch": 0.9833765441512887, |
| "grad_norm": 0.22462912016487835, |
| "learning_rate": 6.019771591008015e-06, |
| "loss": 0.2116, |
| "step": 1612 |
| }, |
| { |
| "epoch": 0.9839865792283057, |
| "grad_norm": 0.2179899542904998, |
| "learning_rate": 6.014561736640334e-06, |
| "loss": 0.2244, |
| "step": 1613 |
| }, |
| { |
| "epoch": 0.9845966143053225, |
| "grad_norm": 0.2286697668059702, |
| "learning_rate": 6.009350733205324e-06, |
| "loss": 0.2286, |
| "step": 1614 |
| }, |
| { |
| "epoch": 0.9852066493823395, |
| "grad_norm": 0.24353389205515807, |
| "learning_rate": 6.0041385866048356e-06, |
| "loss": 0.2314, |
| "step": 1615 |
| }, |
| { |
| "epoch": 0.9858166844593564, |
| "grad_norm": 0.217771023881922, |
| "learning_rate": 5.998925302742017e-06, |
| "loss": 0.2182, |
| "step": 1616 |
| }, |
| { |
| "epoch": 0.9864267195363733, |
| "grad_norm": 0.2475617013818729, |
| "learning_rate": 5.993710887521302e-06, |
| "loss": 0.2686, |
| "step": 1617 |
| }, |
| { |
| "epoch": 0.9870367546133902, |
| "grad_norm": 0.23044990847915867, |
| "learning_rate": 5.988495346848409e-06, |
| "loss": 0.2369, |
| "step": 1618 |
| }, |
| { |
| "epoch": 0.9876467896904072, |
| "grad_norm": 0.2545012934007029, |
| "learning_rate": 5.983278686630327e-06, |
| "loss": 0.2435, |
| "step": 1619 |
| }, |
| { |
| "epoch": 0.9882568247674242, |
| "grad_norm": 0.26432844279930834, |
| "learning_rate": 5.978060912775319e-06, |
| "loss": 0.2241, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.988866859844441, |
| "grad_norm": 0.22084458591049622, |
| "learning_rate": 5.972842031192901e-06, |
| "loss": 0.2143, |
| "step": 1621 |
| }, |
| { |
| "epoch": 0.989476894921458, |
| "grad_norm": 0.2170629136166457, |
| "learning_rate": 5.967622047793853e-06, |
| "loss": 0.2133, |
| "step": 1622 |
| }, |
| { |
| "epoch": 0.9900869299984749, |
| "grad_norm": 0.21954983443109286, |
| "learning_rate": 5.962400968490194e-06, |
| "loss": 0.2265, |
| "step": 1623 |
| }, |
| { |
| "epoch": 0.9906969650754919, |
| "grad_norm": 0.2305444478220999, |
| "learning_rate": 5.957178799195191e-06, |
| "loss": 0.2213, |
| "step": 1624 |
| }, |
| { |
| "epoch": 0.9913070001525087, |
| "grad_norm": 0.23003753615902042, |
| "learning_rate": 5.951955545823342e-06, |
| "loss": 0.2289, |
| "step": 1625 |
| }, |
| { |
| "epoch": 0.9919170352295257, |
| "grad_norm": 0.23140788001630902, |
| "learning_rate": 5.946731214290373e-06, |
| "loss": 0.246, |
| "step": 1626 |
| }, |
| { |
| "epoch": 0.9925270703065426, |
| "grad_norm": 0.21227594828029972, |
| "learning_rate": 5.941505810513233e-06, |
| "loss": 0.2031, |
| "step": 1627 |
| }, |
| { |
| "epoch": 0.9931371053835596, |
| "grad_norm": 0.22010918804701388, |
| "learning_rate": 5.936279340410082e-06, |
| "loss": 0.2288, |
| "step": 1628 |
| }, |
| { |
| "epoch": 0.9937471404605764, |
| "grad_norm": 0.23602106031097625, |
| "learning_rate": 5.93105180990029e-06, |
| "loss": 0.2179, |
| "step": 1629 |
| }, |
| { |
| "epoch": 0.9943571755375934, |
| "grad_norm": 0.2309016046215796, |
| "learning_rate": 5.925823224904429e-06, |
| "loss": 0.2104, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.9949672106146104, |
| "grad_norm": 0.23950678926346633, |
| "learning_rate": 5.920593591344264e-06, |
| "loss": 0.2311, |
| "step": 1631 |
| }, |
| { |
| "epoch": 0.9955772456916273, |
| "grad_norm": 0.22637480916172084, |
| "learning_rate": 5.9153629151427475e-06, |
| "loss": 0.2307, |
| "step": 1632 |
| }, |
| { |
| "epoch": 0.9961872807686442, |
| "grad_norm": 0.22041864861326502, |
| "learning_rate": 5.910131202224011e-06, |
| "loss": 0.2083, |
| "step": 1633 |
| }, |
| { |
| "epoch": 0.9967973158456611, |
| "grad_norm": 0.22650306744690762, |
| "learning_rate": 5.904898458513365e-06, |
| "loss": 0.2206, |
| "step": 1634 |
| }, |
| { |
| "epoch": 0.9974073509226781, |
| "grad_norm": 0.22749499229284315, |
| "learning_rate": 5.899664689937283e-06, |
| "loss": 0.2373, |
| "step": 1635 |
| }, |
| { |
| "epoch": 0.998017385999695, |
| "grad_norm": 0.21288210761993082, |
| "learning_rate": 5.894429902423402e-06, |
| "loss": 0.2173, |
| "step": 1636 |
| }, |
| { |
| "epoch": 0.9986274210767119, |
| "grad_norm": 0.2217043488048322, |
| "learning_rate": 5.8891941019005095e-06, |
| "loss": 0.2329, |
| "step": 1637 |
| }, |
| { |
| "epoch": 0.9992374561537288, |
| "grad_norm": 0.27082147748875746, |
| "learning_rate": 5.883957294298545e-06, |
| "loss": 0.237, |
| "step": 1638 |
| }, |
| { |
| "epoch": 0.9998474912307458, |
| "grad_norm": 0.2390081544955565, |
| "learning_rate": 5.8787194855485855e-06, |
| "loss": 0.2498, |
| "step": 1639 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.4362468690070915, |
| "learning_rate": 5.87348068158284e-06, |
| "loss": 0.2575, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.0006100350770168, |
| "grad_norm": 0.24525124320565972, |
| "learning_rate": 5.8682408883346535e-06, |
| "loss": 0.2165, |
| "step": 1641 |
| }, |
| { |
| "epoch": 1.001220070154034, |
| "grad_norm": 0.22532467581428622, |
| "learning_rate": 5.863000111738479e-06, |
| "loss": 0.1919, |
| "step": 1642 |
| }, |
| { |
| "epoch": 1.0018301052310508, |
| "grad_norm": 0.25364441859572456, |
| "learning_rate": 5.857758357729892e-06, |
| "loss": 0.2281, |
| "step": 1643 |
| }, |
| { |
| "epoch": 1.0024401403080676, |
| "grad_norm": 0.23172737765478774, |
| "learning_rate": 5.852515632245574e-06, |
| "loss": 0.2036, |
| "step": 1644 |
| }, |
| { |
| "epoch": 1.0030501753850847, |
| "grad_norm": 0.22918099355509405, |
| "learning_rate": 5.847271941223301e-06, |
| "loss": 0.2194, |
| "step": 1645 |
| }, |
| { |
| "epoch": 1.0036602104621015, |
| "grad_norm": 0.2328036742592211, |
| "learning_rate": 5.8420272906019506e-06, |
| "loss": 0.2154, |
| "step": 1646 |
| }, |
| { |
| "epoch": 1.0042702455391186, |
| "grad_norm": 0.2528041564358456, |
| "learning_rate": 5.8367816863214825e-06, |
| "loss": 0.2251, |
| "step": 1647 |
| }, |
| { |
| "epoch": 1.0048802806161354, |
| "grad_norm": 0.24623727405835405, |
| "learning_rate": 5.831535134322935e-06, |
| "loss": 0.2192, |
| "step": 1648 |
| }, |
| { |
| "epoch": 1.0054903156931523, |
| "grad_norm": 0.23508962134126507, |
| "learning_rate": 5.826287640548425e-06, |
| "loss": 0.2014, |
| "step": 1649 |
| }, |
| { |
| "epoch": 1.0061003507701693, |
| "grad_norm": 0.2545397092728682, |
| "learning_rate": 5.821039210941133e-06, |
| "loss": 0.2247, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.0067103858471862, |
| "grad_norm": 0.2341766055191371, |
| "learning_rate": 5.815789851445296e-06, |
| "loss": 0.2184, |
| "step": 1651 |
| }, |
| { |
| "epoch": 1.007320420924203, |
| "grad_norm": 0.2304950075693666, |
| "learning_rate": 5.810539568006213e-06, |
| "loss": 0.1997, |
| "step": 1652 |
| }, |
| { |
| "epoch": 1.00793045600122, |
| "grad_norm": 0.23771967596273264, |
| "learning_rate": 5.80528836657022e-06, |
| "loss": 0.2124, |
| "step": 1653 |
| }, |
| { |
| "epoch": 1.008540491078237, |
| "grad_norm": 0.23310797936523897, |
| "learning_rate": 5.800036253084699e-06, |
| "loss": 0.2084, |
| "step": 1654 |
| }, |
| { |
| "epoch": 1.009150526155254, |
| "grad_norm": 0.24051112098620298, |
| "learning_rate": 5.794783233498062e-06, |
| "loss": 0.2118, |
| "step": 1655 |
| }, |
| { |
| "epoch": 1.0097605612322709, |
| "grad_norm": 0.22746424422800543, |
| "learning_rate": 5.789529313759746e-06, |
| "loss": 0.2139, |
| "step": 1656 |
| }, |
| { |
| "epoch": 1.0103705963092877, |
| "grad_norm": 0.24267923602790298, |
| "learning_rate": 5.784274499820214e-06, |
| "loss": 0.2071, |
| "step": 1657 |
| }, |
| { |
| "epoch": 1.0109806313863048, |
| "grad_norm": 0.24322898160992537, |
| "learning_rate": 5.779018797630934e-06, |
| "loss": 0.2041, |
| "step": 1658 |
| }, |
| { |
| "epoch": 1.0115906664633216, |
| "grad_norm": 0.23510639718676693, |
| "learning_rate": 5.773762213144384e-06, |
| "loss": 0.2333, |
| "step": 1659 |
| }, |
| { |
| "epoch": 1.0122007015403385, |
| "grad_norm": 0.2447061546009269, |
| "learning_rate": 5.76850475231404e-06, |
| "loss": 0.2167, |
| "step": 1660 |
| }, |
| { |
| "epoch": 1.0128107366173555, |
| "grad_norm": 0.2564959269452337, |
| "learning_rate": 5.763246421094373e-06, |
| "loss": 0.2134, |
| "step": 1661 |
| }, |
| { |
| "epoch": 1.0134207716943724, |
| "grad_norm": 0.2388834769977569, |
| "learning_rate": 5.757987225440836e-06, |
| "loss": 0.1941, |
| "step": 1662 |
| }, |
| { |
| "epoch": 1.0140308067713895, |
| "grad_norm": 0.2232628943076604, |
| "learning_rate": 5.7527271713098645e-06, |
| "loss": 0.2023, |
| "step": 1663 |
| }, |
| { |
| "epoch": 1.0146408418484063, |
| "grad_norm": 0.23877377779070566, |
| "learning_rate": 5.747466264658863e-06, |
| "loss": 0.2216, |
| "step": 1664 |
| }, |
| { |
| "epoch": 1.0152508769254232, |
| "grad_norm": 0.23347183808731123, |
| "learning_rate": 5.742204511446203e-06, |
| "loss": 0.2154, |
| "step": 1665 |
| }, |
| { |
| "epoch": 1.0158609120024402, |
| "grad_norm": 0.23259316764185303, |
| "learning_rate": 5.736941917631217e-06, |
| "loss": 0.214, |
| "step": 1666 |
| }, |
| { |
| "epoch": 1.016470947079457, |
| "grad_norm": 0.2524496772639378, |
| "learning_rate": 5.731678489174186e-06, |
| "loss": 0.2389, |
| "step": 1667 |
| }, |
| { |
| "epoch": 1.017080982156474, |
| "grad_norm": 0.22833440973811123, |
| "learning_rate": 5.7264142320363384e-06, |
| "loss": 0.213, |
| "step": 1668 |
| }, |
| { |
| "epoch": 1.017691017233491, |
| "grad_norm": 0.2361574490453357, |
| "learning_rate": 5.72114915217984e-06, |
| "loss": 0.219, |
| "step": 1669 |
| }, |
| { |
| "epoch": 1.0183010523105078, |
| "grad_norm": 0.2280786951276804, |
| "learning_rate": 5.7158832555677904e-06, |
| "loss": 0.2108, |
| "step": 1670 |
| }, |
| { |
| "epoch": 1.018911087387525, |
| "grad_norm": 0.23367357132543207, |
| "learning_rate": 5.710616548164212e-06, |
| "loss": 0.2061, |
| "step": 1671 |
| }, |
| { |
| "epoch": 1.0195211224645417, |
| "grad_norm": 0.236030915869193, |
| "learning_rate": 5.705349035934047e-06, |
| "loss": 0.2025, |
| "step": 1672 |
| }, |
| { |
| "epoch": 1.0201311575415586, |
| "grad_norm": 0.22653660507034204, |
| "learning_rate": 5.7000807248431466e-06, |
| "loss": 0.1959, |
| "step": 1673 |
| }, |
| { |
| "epoch": 1.0207411926185757, |
| "grad_norm": 0.24496796300881274, |
| "learning_rate": 5.694811620858269e-06, |
| "loss": 0.2132, |
| "step": 1674 |
| }, |
| { |
| "epoch": 1.0213512276955925, |
| "grad_norm": 0.23452084512565427, |
| "learning_rate": 5.689541729947071e-06, |
| "loss": 0.2209, |
| "step": 1675 |
| }, |
| { |
| "epoch": 1.0219612627726093, |
| "grad_norm": 0.25138673187324173, |
| "learning_rate": 5.684271058078101e-06, |
| "loss": 0.2199, |
| "step": 1676 |
| }, |
| { |
| "epoch": 1.0225712978496264, |
| "grad_norm": 0.24117248054105317, |
| "learning_rate": 5.6789996112207865e-06, |
| "loss": 0.2204, |
| "step": 1677 |
| }, |
| { |
| "epoch": 1.0231813329266433, |
| "grad_norm": 0.23694095756163946, |
| "learning_rate": 5.673727395345442e-06, |
| "loss": 0.2154, |
| "step": 1678 |
| }, |
| { |
| "epoch": 1.02379136800366, |
| "grad_norm": 0.23021351407090954, |
| "learning_rate": 5.668454416423243e-06, |
| "loss": 0.2344, |
| "step": 1679 |
| }, |
| { |
| "epoch": 1.0244014030806772, |
| "grad_norm": 0.24354787085020255, |
| "learning_rate": 5.663180680426237e-06, |
| "loss": 0.2215, |
| "step": 1680 |
| }, |
| { |
| "epoch": 1.025011438157694, |
| "grad_norm": 0.23398078469245429, |
| "learning_rate": 5.657906193327325e-06, |
| "loss": 0.1964, |
| "step": 1681 |
| }, |
| { |
| "epoch": 1.025621473234711, |
| "grad_norm": 0.23555210978513935, |
| "learning_rate": 5.65263096110026e-06, |
| "loss": 0.2113, |
| "step": 1682 |
| }, |
| { |
| "epoch": 1.026231508311728, |
| "grad_norm": 0.24099367744273603, |
| "learning_rate": 5.647354989719635e-06, |
| "loss": 0.2326, |
| "step": 1683 |
| }, |
| { |
| "epoch": 1.0268415433887448, |
| "grad_norm": 0.2364743426107808, |
| "learning_rate": 5.64207828516089e-06, |
| "loss": 0.213, |
| "step": 1684 |
| }, |
| { |
| "epoch": 1.0274515784657619, |
| "grad_norm": 0.24114895326828484, |
| "learning_rate": 5.636800853400285e-06, |
| "loss": 0.2187, |
| "step": 1685 |
| }, |
| { |
| "epoch": 1.0280616135427787, |
| "grad_norm": 0.22602307697039975, |
| "learning_rate": 5.631522700414909e-06, |
| "loss": 0.2138, |
| "step": 1686 |
| }, |
| { |
| "epoch": 1.0286716486197955, |
| "grad_norm": 0.22019981637444508, |
| "learning_rate": 5.626243832182663e-06, |
| "loss": 0.1822, |
| "step": 1687 |
| }, |
| { |
| "epoch": 1.0292816836968126, |
| "grad_norm": 0.24498869528300118, |
| "learning_rate": 5.620964254682267e-06, |
| "loss": 0.229, |
| "step": 1688 |
| }, |
| { |
| "epoch": 1.0298917187738295, |
| "grad_norm": 0.22457143436015983, |
| "learning_rate": 5.615683973893235e-06, |
| "loss": 0.2321, |
| "step": 1689 |
| }, |
| { |
| "epoch": 1.0305017538508465, |
| "grad_norm": 0.230241215035034, |
| "learning_rate": 5.610402995795884e-06, |
| "loss": 0.2092, |
| "step": 1690 |
| }, |
| { |
| "epoch": 1.0311117889278634, |
| "grad_norm": 0.23816566700218206, |
| "learning_rate": 5.605121326371316e-06, |
| "loss": 0.2201, |
| "step": 1691 |
| }, |
| { |
| "epoch": 1.0317218240048802, |
| "grad_norm": 0.2450343849705002, |
| "learning_rate": 5.599838971601418e-06, |
| "loss": 0.2182, |
| "step": 1692 |
| }, |
| { |
| "epoch": 1.0323318590818973, |
| "grad_norm": 0.24422927352433343, |
| "learning_rate": 5.594555937468856e-06, |
| "loss": 0.2252, |
| "step": 1693 |
| }, |
| { |
| "epoch": 1.0329418941589141, |
| "grad_norm": 0.2343514132682759, |
| "learning_rate": 5.589272229957061e-06, |
| "loss": 0.185, |
| "step": 1694 |
| }, |
| { |
| "epoch": 1.033551929235931, |
| "grad_norm": 0.2361203993950127, |
| "learning_rate": 5.583987855050228e-06, |
| "loss": 0.2063, |
| "step": 1695 |
| }, |
| { |
| "epoch": 1.034161964312948, |
| "grad_norm": 0.2565116992016366, |
| "learning_rate": 5.578702818733308e-06, |
| "loss": 0.2156, |
| "step": 1696 |
| }, |
| { |
| "epoch": 1.034771999389965, |
| "grad_norm": 0.2546156826740772, |
| "learning_rate": 5.573417126992004e-06, |
| "loss": 0.2415, |
| "step": 1697 |
| }, |
| { |
| "epoch": 1.035382034466982, |
| "grad_norm": 0.23336883260302368, |
| "learning_rate": 5.568130785812755e-06, |
| "loss": 0.222, |
| "step": 1698 |
| }, |
| { |
| "epoch": 1.0359920695439988, |
| "grad_norm": 0.25462328137310886, |
| "learning_rate": 5.562843801182741e-06, |
| "loss": 0.2129, |
| "step": 1699 |
| }, |
| { |
| "epoch": 1.0366021046210157, |
| "grad_norm": 0.24585946060544714, |
| "learning_rate": 5.5575561790898705e-06, |
| "loss": 0.235, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.0372121396980327, |
| "grad_norm": 0.24127705207317346, |
| "learning_rate": 5.55226792552277e-06, |
| "loss": 0.2231, |
| "step": 1701 |
| }, |
| { |
| "epoch": 1.0378221747750496, |
| "grad_norm": 0.24249186568336176, |
| "learning_rate": 5.546979046470784e-06, |
| "loss": 0.2198, |
| "step": 1702 |
| }, |
| { |
| "epoch": 1.0384322098520664, |
| "grad_norm": 0.2841185684942545, |
| "learning_rate": 5.5416895479239665e-06, |
| "loss": 0.2212, |
| "step": 1703 |
| }, |
| { |
| "epoch": 1.0390422449290835, |
| "grad_norm": 0.22862341833091082, |
| "learning_rate": 5.536399435873071e-06, |
| "loss": 0.2184, |
| "step": 1704 |
| }, |
| { |
| "epoch": 1.0396522800061003, |
| "grad_norm": 0.2511982074093489, |
| "learning_rate": 5.5311087163095475e-06, |
| "loss": 0.2066, |
| "step": 1705 |
| }, |
| { |
| "epoch": 1.0402623150831172, |
| "grad_norm": 0.2313982450490478, |
| "learning_rate": 5.52581739522553e-06, |
| "loss": 0.212, |
| "step": 1706 |
| }, |
| { |
| "epoch": 1.0408723501601342, |
| "grad_norm": 0.23697031192994897, |
| "learning_rate": 5.520525478613838e-06, |
| "loss": 0.2121, |
| "step": 1707 |
| }, |
| { |
| "epoch": 1.041482385237151, |
| "grad_norm": 0.2561892836965319, |
| "learning_rate": 5.515232972467969e-06, |
| "loss": 0.2199, |
| "step": 1708 |
| }, |
| { |
| "epoch": 1.0420924203141682, |
| "grad_norm": 0.244397771634611, |
| "learning_rate": 5.509939882782077e-06, |
| "loss": 0.2068, |
| "step": 1709 |
| }, |
| { |
| "epoch": 1.042702455391185, |
| "grad_norm": 0.23380824757735447, |
| "learning_rate": 5.504646215550988e-06, |
| "loss": 0.2211, |
| "step": 1710 |
| }, |
| { |
| "epoch": 1.0433124904682018, |
| "grad_norm": 0.2452556417197051, |
| "learning_rate": 5.4993519767701745e-06, |
| "loss": 0.2125, |
| "step": 1711 |
| }, |
| { |
| "epoch": 1.043922525545219, |
| "grad_norm": 0.2305296614582525, |
| "learning_rate": 5.494057172435761e-06, |
| "loss": 0.2102, |
| "step": 1712 |
| }, |
| { |
| "epoch": 1.0445325606222358, |
| "grad_norm": 0.24478341739491696, |
| "learning_rate": 5.4887618085445094e-06, |
| "loss": 0.2128, |
| "step": 1713 |
| }, |
| { |
| "epoch": 1.0451425956992526, |
| "grad_norm": 0.22777418813905734, |
| "learning_rate": 5.48346589109382e-06, |
| "loss": 0.1938, |
| "step": 1714 |
| }, |
| { |
| "epoch": 1.0457526307762697, |
| "grad_norm": 0.2596034979250406, |
| "learning_rate": 5.478169426081712e-06, |
| "loss": 0.2123, |
| "step": 1715 |
| }, |
| { |
| "epoch": 1.0463626658532865, |
| "grad_norm": 0.241016728765862, |
| "learning_rate": 5.472872419506833e-06, |
| "loss": 0.2092, |
| "step": 1716 |
| }, |
| { |
| "epoch": 1.0469727009303036, |
| "grad_norm": 0.21788730778084672, |
| "learning_rate": 5.467574877368441e-06, |
| "loss": 0.1909, |
| "step": 1717 |
| }, |
| { |
| "epoch": 1.0475827360073204, |
| "grad_norm": 0.24697089084624776, |
| "learning_rate": 5.4622768056664e-06, |
| "loss": 0.2083, |
| "step": 1718 |
| }, |
| { |
| "epoch": 1.0481927710843373, |
| "grad_norm": 0.25807379681608617, |
| "learning_rate": 5.4569782104011725e-06, |
| "loss": 0.2187, |
| "step": 1719 |
| }, |
| { |
| "epoch": 1.0488028061613544, |
| "grad_norm": 0.24314953105974776, |
| "learning_rate": 5.4516790975738146e-06, |
| "loss": 0.2248, |
| "step": 1720 |
| }, |
| { |
| "epoch": 1.0494128412383712, |
| "grad_norm": 0.23758838267771906, |
| "learning_rate": 5.446379473185972e-06, |
| "loss": 0.1975, |
| "step": 1721 |
| }, |
| { |
| "epoch": 1.050022876315388, |
| "grad_norm": 0.25168940382766586, |
| "learning_rate": 5.441079343239867e-06, |
| "loss": 0.2196, |
| "step": 1722 |
| }, |
| { |
| "epoch": 1.0506329113924051, |
| "grad_norm": 0.23974904667847913, |
| "learning_rate": 5.435778713738292e-06, |
| "loss": 0.2012, |
| "step": 1723 |
| }, |
| { |
| "epoch": 1.051242946469422, |
| "grad_norm": 0.225951684544416, |
| "learning_rate": 5.4304775906846095e-06, |
| "loss": 0.2175, |
| "step": 1724 |
| }, |
| { |
| "epoch": 1.051852981546439, |
| "grad_norm": 0.23899014950838993, |
| "learning_rate": 5.42517598008274e-06, |
| "loss": 0.206, |
| "step": 1725 |
| }, |
| { |
| "epoch": 1.0524630166234559, |
| "grad_norm": 0.21642954447018442, |
| "learning_rate": 5.419873887937154e-06, |
| "loss": 0.1942, |
| "step": 1726 |
| }, |
| { |
| "epoch": 1.0530730517004727, |
| "grad_norm": 0.2287863512942669, |
| "learning_rate": 5.41457132025287e-06, |
| "loss": 0.2156, |
| "step": 1727 |
| }, |
| { |
| "epoch": 1.0536830867774898, |
| "grad_norm": 0.23290535502397355, |
| "learning_rate": 5.4092682830354435e-06, |
| "loss": 0.2085, |
| "step": 1728 |
| }, |
| { |
| "epoch": 1.0542931218545066, |
| "grad_norm": 0.23491286063679967, |
| "learning_rate": 5.403964782290962e-06, |
| "loss": 0.224, |
| "step": 1729 |
| }, |
| { |
| "epoch": 1.0549031569315235, |
| "grad_norm": 0.22898564744066244, |
| "learning_rate": 5.398660824026039e-06, |
| "loss": 0.2147, |
| "step": 1730 |
| }, |
| { |
| "epoch": 1.0555131920085405, |
| "grad_norm": 0.23321342607229884, |
| "learning_rate": 5.393356414247806e-06, |
| "loss": 0.189, |
| "step": 1731 |
| }, |
| { |
| "epoch": 1.0561232270855574, |
| "grad_norm": 0.24018281207640957, |
| "learning_rate": 5.388051558963904e-06, |
| "loss": 0.208, |
| "step": 1732 |
| }, |
| { |
| "epoch": 1.0567332621625742, |
| "grad_norm": 0.22159660079783608, |
| "learning_rate": 5.38274626418248e-06, |
| "loss": 0.1988, |
| "step": 1733 |
| }, |
| { |
| "epoch": 1.0573432972395913, |
| "grad_norm": 0.23295975100519303, |
| "learning_rate": 5.377440535912177e-06, |
| "loss": 0.2113, |
| "step": 1734 |
| }, |
| { |
| "epoch": 1.0579533323166082, |
| "grad_norm": 0.23448234332441525, |
| "learning_rate": 5.372134380162133e-06, |
| "loss": 0.2192, |
| "step": 1735 |
| }, |
| { |
| "epoch": 1.0585633673936252, |
| "grad_norm": 0.24268607330157105, |
| "learning_rate": 5.366827802941968e-06, |
| "loss": 0.2068, |
| "step": 1736 |
| }, |
| { |
| "epoch": 1.059173402470642, |
| "grad_norm": 0.2279938492718881, |
| "learning_rate": 5.361520810261779e-06, |
| "loss": 0.2069, |
| "step": 1737 |
| }, |
| { |
| "epoch": 1.059783437547659, |
| "grad_norm": 0.23729437606366588, |
| "learning_rate": 5.356213408132131e-06, |
| "loss": 0.2152, |
| "step": 1738 |
| }, |
| { |
| "epoch": 1.060393472624676, |
| "grad_norm": 0.22743666468075752, |
| "learning_rate": 5.3509056025640575e-06, |
| "loss": 0.2154, |
| "step": 1739 |
| }, |
| { |
| "epoch": 1.0610035077016928, |
| "grad_norm": 0.22491802092028118, |
| "learning_rate": 5.345597399569047e-06, |
| "loss": 0.217, |
| "step": 1740 |
| }, |
| { |
| "epoch": 1.0616135427787097, |
| "grad_norm": 0.22367511029922768, |
| "learning_rate": 5.340288805159037e-06, |
| "loss": 0.2202, |
| "step": 1741 |
| }, |
| { |
| "epoch": 1.0622235778557267, |
| "grad_norm": 0.22623616900798266, |
| "learning_rate": 5.334979825346409e-06, |
| "loss": 0.201, |
| "step": 1742 |
| }, |
| { |
| "epoch": 1.0628336129327436, |
| "grad_norm": 0.22928684429359641, |
| "learning_rate": 5.329670466143981e-06, |
| "loss": 0.2082, |
| "step": 1743 |
| }, |
| { |
| "epoch": 1.0634436480097607, |
| "grad_norm": 0.25200001642666464, |
| "learning_rate": 5.3243607335650014e-06, |
| "loss": 0.2215, |
| "step": 1744 |
| }, |
| { |
| "epoch": 1.0640536830867775, |
| "grad_norm": 0.23013795665760164, |
| "learning_rate": 5.319050633623141e-06, |
| "loss": 0.2219, |
| "step": 1745 |
| }, |
| { |
| "epoch": 1.0646637181637943, |
| "grad_norm": 0.23397812159289222, |
| "learning_rate": 5.3137401723324885e-06, |
| "loss": 0.2324, |
| "step": 1746 |
| }, |
| { |
| "epoch": 1.0652737532408114, |
| "grad_norm": 0.22448906533672924, |
| "learning_rate": 5.308429355707538e-06, |
| "loss": 0.2243, |
| "step": 1747 |
| }, |
| { |
| "epoch": 1.0658837883178283, |
| "grad_norm": 0.2268116613353298, |
| "learning_rate": 5.303118189763187e-06, |
| "loss": 0.1987, |
| "step": 1748 |
| }, |
| { |
| "epoch": 1.066493823394845, |
| "grad_norm": 0.25510457913585216, |
| "learning_rate": 5.297806680514731e-06, |
| "loss": 0.207, |
| "step": 1749 |
| }, |
| { |
| "epoch": 1.0671038584718622, |
| "grad_norm": 0.23858305824485018, |
| "learning_rate": 5.292494833977853e-06, |
| "loss": 0.202, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.067713893548879, |
| "grad_norm": 0.22279142082214048, |
| "learning_rate": 5.287182656168618e-06, |
| "loss": 0.204, |
| "step": 1751 |
| }, |
| { |
| "epoch": 1.068323928625896, |
| "grad_norm": 0.22140767823445726, |
| "learning_rate": 5.281870153103464e-06, |
| "loss": 0.2168, |
| "step": 1752 |
| }, |
| { |
| "epoch": 1.068933963702913, |
| "grad_norm": 0.24279225254617254, |
| "learning_rate": 5.276557330799203e-06, |
| "loss": 0.2199, |
| "step": 1753 |
| }, |
| { |
| "epoch": 1.0695439987799298, |
| "grad_norm": 0.22987657138259443, |
| "learning_rate": 5.271244195273002e-06, |
| "loss": 0.2179, |
| "step": 1754 |
| }, |
| { |
| "epoch": 1.0701540338569469, |
| "grad_norm": 0.2242483145934005, |
| "learning_rate": 5.2659307525423895e-06, |
| "loss": 0.1995, |
| "step": 1755 |
| }, |
| { |
| "epoch": 1.0707640689339637, |
| "grad_norm": 0.23595573283445498, |
| "learning_rate": 5.260617008625235e-06, |
| "loss": 0.2001, |
| "step": 1756 |
| }, |
| { |
| "epoch": 1.0713741040109805, |
| "grad_norm": 0.2291741674921307, |
| "learning_rate": 5.255302969539753e-06, |
| "loss": 0.2034, |
| "step": 1757 |
| }, |
| { |
| "epoch": 1.0719841390879976, |
| "grad_norm": 0.22887782966092776, |
| "learning_rate": 5.2499886413044934e-06, |
| "loss": 0.2059, |
| "step": 1758 |
| }, |
| { |
| "epoch": 1.0725941741650145, |
| "grad_norm": 0.22360670111873499, |
| "learning_rate": 5.24467402993833e-06, |
| "loss": 0.2058, |
| "step": 1759 |
| }, |
| { |
| "epoch": 1.0732042092420313, |
| "grad_norm": 0.24012499144382562, |
| "learning_rate": 5.239359141460461e-06, |
| "loss": 0.2179, |
| "step": 1760 |
| }, |
| { |
| "epoch": 1.0738142443190484, |
| "grad_norm": 0.31511244352811957, |
| "learning_rate": 5.234043981890395e-06, |
| "loss": 0.2167, |
| "step": 1761 |
| }, |
| { |
| "epoch": 1.0744242793960652, |
| "grad_norm": 0.2266221058824141, |
| "learning_rate": 5.2287285572479475e-06, |
| "loss": 0.2051, |
| "step": 1762 |
| }, |
| { |
| "epoch": 1.0750343144730823, |
| "grad_norm": 0.22440582251739094, |
| "learning_rate": 5.22341287355324e-06, |
| "loss": 0.2028, |
| "step": 1763 |
| }, |
| { |
| "epoch": 1.0756443495500991, |
| "grad_norm": 0.2157945368902979, |
| "learning_rate": 5.218096936826681e-06, |
| "loss": 0.1913, |
| "step": 1764 |
| }, |
| { |
| "epoch": 1.076254384627116, |
| "grad_norm": 0.22696069047564313, |
| "learning_rate": 5.212780753088968e-06, |
| "loss": 0.2161, |
| "step": 1765 |
| }, |
| { |
| "epoch": 1.076864419704133, |
| "grad_norm": 0.22411271328064739, |
| "learning_rate": 5.207464328361078e-06, |
| "loss": 0.2134, |
| "step": 1766 |
| }, |
| { |
| "epoch": 1.07747445478115, |
| "grad_norm": 0.2163020875646112, |
| "learning_rate": 5.202147668664264e-06, |
| "loss": 0.207, |
| "step": 1767 |
| }, |
| { |
| "epoch": 1.078084489858167, |
| "grad_norm": 0.25491316943478276, |
| "learning_rate": 5.196830780020038e-06, |
| "loss": 0.2255, |
| "step": 1768 |
| }, |
| { |
| "epoch": 1.0786945249351838, |
| "grad_norm": 0.24006939574592479, |
| "learning_rate": 5.191513668450178e-06, |
| "loss": 0.2065, |
| "step": 1769 |
| }, |
| { |
| "epoch": 1.0793045600122007, |
| "grad_norm": 0.22565303602624653, |
| "learning_rate": 5.186196339976711e-06, |
| "loss": 0.2142, |
| "step": 1770 |
| }, |
| { |
| "epoch": 1.0799145950892177, |
| "grad_norm": 0.23431241380313964, |
| "learning_rate": 5.180878800621911e-06, |
| "loss": 0.224, |
| "step": 1771 |
| }, |
| { |
| "epoch": 1.0805246301662346, |
| "grad_norm": 0.2368388255119797, |
| "learning_rate": 5.175561056408291e-06, |
| "loss": 0.2057, |
| "step": 1772 |
| }, |
| { |
| "epoch": 1.0811346652432514, |
| "grad_norm": 0.2286738585549779, |
| "learning_rate": 5.170243113358594e-06, |
| "loss": 0.1967, |
| "step": 1773 |
| }, |
| { |
| "epoch": 1.0817447003202685, |
| "grad_norm": 0.23447789272378972, |
| "learning_rate": 5.164924977495792e-06, |
| "loss": 0.2247, |
| "step": 1774 |
| }, |
| { |
| "epoch": 1.0823547353972853, |
| "grad_norm": 0.2287799320408433, |
| "learning_rate": 5.1596066548430725e-06, |
| "loss": 0.2125, |
| "step": 1775 |
| }, |
| { |
| "epoch": 1.0829647704743022, |
| "grad_norm": 0.22061561139206476, |
| "learning_rate": 5.154288151423833e-06, |
| "loss": 0.1934, |
| "step": 1776 |
| }, |
| { |
| "epoch": 1.0835748055513192, |
| "grad_norm": 0.23075046956961687, |
| "learning_rate": 5.1489694732616805e-06, |
| "loss": 0.2154, |
| "step": 1777 |
| }, |
| { |
| "epoch": 1.084184840628336, |
| "grad_norm": 0.22967293413928785, |
| "learning_rate": 5.143650626380417e-06, |
| "loss": 0.2212, |
| "step": 1778 |
| }, |
| { |
| "epoch": 1.0847948757053532, |
| "grad_norm": 0.22575905174950292, |
| "learning_rate": 5.138331616804034e-06, |
| "loss": 0.198, |
| "step": 1779 |
| }, |
| { |
| "epoch": 1.08540491078237, |
| "grad_norm": 0.2362401899413697, |
| "learning_rate": 5.133012450556709e-06, |
| "loss": 0.2041, |
| "step": 1780 |
| }, |
| { |
| "epoch": 1.0860149458593868, |
| "grad_norm": 0.22921901774320988, |
| "learning_rate": 5.127693133662801e-06, |
| "loss": 0.2207, |
| "step": 1781 |
| }, |
| { |
| "epoch": 1.086624980936404, |
| "grad_norm": 0.2397430325638046, |
| "learning_rate": 5.122373672146831e-06, |
| "loss": 0.2218, |
| "step": 1782 |
| }, |
| { |
| "epoch": 1.0872350160134208, |
| "grad_norm": 0.2441605988712309, |
| "learning_rate": 5.117054072033492e-06, |
| "loss": 0.2252, |
| "step": 1783 |
| }, |
| { |
| "epoch": 1.0878450510904376, |
| "grad_norm": 0.23041025603342433, |
| "learning_rate": 5.111734339347629e-06, |
| "loss": 0.2101, |
| "step": 1784 |
| }, |
| { |
| "epoch": 1.0884550861674547, |
| "grad_norm": 0.210872259108622, |
| "learning_rate": 5.106414480114238e-06, |
| "loss": 0.2044, |
| "step": 1785 |
| }, |
| { |
| "epoch": 1.0890651212444715, |
| "grad_norm": 0.2315607312022513, |
| "learning_rate": 5.10109450035846e-06, |
| "loss": 0.2214, |
| "step": 1786 |
| }, |
| { |
| "epoch": 1.0896751563214884, |
| "grad_norm": 0.2208674173967462, |
| "learning_rate": 5.095774406105572e-06, |
| "loss": 0.2031, |
| "step": 1787 |
| }, |
| { |
| "epoch": 1.0902851913985054, |
| "grad_norm": 0.23095765951855177, |
| "learning_rate": 5.0904542033809774e-06, |
| "loss": 0.1979, |
| "step": 1788 |
| }, |
| { |
| "epoch": 1.0908952264755223, |
| "grad_norm": 0.21581001155371557, |
| "learning_rate": 5.085133898210208e-06, |
| "loss": 0.2006, |
| "step": 1789 |
| }, |
| { |
| "epoch": 1.0915052615525394, |
| "grad_norm": 0.23434786464295787, |
| "learning_rate": 5.079813496618908e-06, |
| "loss": 0.2127, |
| "step": 1790 |
| }, |
| { |
| "epoch": 1.0921152966295562, |
| "grad_norm": 0.23162166178276317, |
| "learning_rate": 5.07449300463283e-06, |
| "loss": 0.2193, |
| "step": 1791 |
| }, |
| { |
| "epoch": 1.092725331706573, |
| "grad_norm": 0.231793141185086, |
| "learning_rate": 5.0691724282778345e-06, |
| "loss": 0.2222, |
| "step": 1792 |
| }, |
| { |
| "epoch": 1.0933353667835901, |
| "grad_norm": 0.2374667122949609, |
| "learning_rate": 5.06385177357987e-06, |
| "loss": 0.2114, |
| "step": 1793 |
| }, |
| { |
| "epoch": 1.093945401860607, |
| "grad_norm": 0.22213391536139876, |
| "learning_rate": 5.058531046564977e-06, |
| "loss": 0.1942, |
| "step": 1794 |
| }, |
| { |
| "epoch": 1.094555436937624, |
| "grad_norm": 0.22243485467096535, |
| "learning_rate": 5.053210253259282e-06, |
| "loss": 0.214, |
| "step": 1795 |
| }, |
| { |
| "epoch": 1.0951654720146409, |
| "grad_norm": 0.24391573079049042, |
| "learning_rate": 5.04788939968898e-06, |
| "loss": 0.2349, |
| "step": 1796 |
| }, |
| { |
| "epoch": 1.0957755070916577, |
| "grad_norm": 0.2358921418298421, |
| "learning_rate": 5.042568491880338e-06, |
| "loss": 0.2295, |
| "step": 1797 |
| }, |
| { |
| "epoch": 1.0963855421686748, |
| "grad_norm": 0.2389177385955543, |
| "learning_rate": 5.0372475358596825e-06, |
| "loss": 0.2196, |
| "step": 1798 |
| }, |
| { |
| "epoch": 1.0969955772456916, |
| "grad_norm": 0.2323273476825324, |
| "learning_rate": 5.031926537653396e-06, |
| "loss": 0.1973, |
| "step": 1799 |
| }, |
| { |
| "epoch": 1.0976056123227085, |
| "grad_norm": 0.23721478174918753, |
| "learning_rate": 5.026605503287911e-06, |
| "loss": 0.1953, |
| "step": 1800 |
| }, |
| { |
| "epoch": 1.0982156473997255, |
| "grad_norm": 0.2390291825850533, |
| "learning_rate": 5.021284438789694e-06, |
| "loss": 0.2119, |
| "step": 1801 |
| }, |
| { |
| "epoch": 1.0988256824767424, |
| "grad_norm": 0.23791508453378607, |
| "learning_rate": 5.015963350185253e-06, |
| "loss": 0.2154, |
| "step": 1802 |
| }, |
| { |
| "epoch": 1.0994357175537592, |
| "grad_norm": 0.23144393652698764, |
| "learning_rate": 5.010642243501119e-06, |
| "loss": 0.1874, |
| "step": 1803 |
| }, |
| { |
| "epoch": 1.1000457526307763, |
| "grad_norm": 0.21940967709391074, |
| "learning_rate": 5.005321124763847e-06, |
| "loss": 0.2089, |
| "step": 1804 |
| }, |
| { |
| "epoch": 1.1006557877077932, |
| "grad_norm": 0.21810324102556533, |
| "learning_rate": 5e-06, |
| "loss": 0.2133, |
| "step": 1805 |
| }, |
| { |
| "epoch": 1.1012658227848102, |
| "grad_norm": 0.24532717574886334, |
| "learning_rate": 4.994678875236155e-06, |
| "loss": 0.2157, |
| "step": 1806 |
| }, |
| { |
| "epoch": 1.101875857861827, |
| "grad_norm": 0.24625319723639874, |
| "learning_rate": 4.989357756498882e-06, |
| "loss": 0.2002, |
| "step": 1807 |
| }, |
| { |
| "epoch": 1.102485892938844, |
| "grad_norm": 0.23395800432045705, |
| "learning_rate": 4.9840366498147495e-06, |
| "loss": 0.2114, |
| "step": 1808 |
| }, |
| { |
| "epoch": 1.103095928015861, |
| "grad_norm": 0.23251422610518932, |
| "learning_rate": 4.9787155612103076e-06, |
| "loss": 0.1838, |
| "step": 1809 |
| }, |
| { |
| "epoch": 1.1037059630928778, |
| "grad_norm": 0.24423256577250954, |
| "learning_rate": 4.9733944967120905e-06, |
| "loss": 0.2311, |
| "step": 1810 |
| }, |
| { |
| "epoch": 1.1043159981698947, |
| "grad_norm": 0.24075558744754294, |
| "learning_rate": 4.968073462346605e-06, |
| "loss": 0.2249, |
| "step": 1811 |
| }, |
| { |
| "epoch": 1.1049260332469117, |
| "grad_norm": 0.2243831584670897, |
| "learning_rate": 4.962752464140318e-06, |
| "loss": 0.2164, |
| "step": 1812 |
| }, |
| { |
| "epoch": 1.1055360683239286, |
| "grad_norm": 0.21557211649475622, |
| "learning_rate": 4.9574315081196634e-06, |
| "loss": 0.1978, |
| "step": 1813 |
| }, |
| { |
| "epoch": 1.1061461034009457, |
| "grad_norm": 0.2254521241476352, |
| "learning_rate": 4.952110600311021e-06, |
| "loss": 0.198, |
| "step": 1814 |
| }, |
| { |
| "epoch": 1.1067561384779625, |
| "grad_norm": 0.22564870909496654, |
| "learning_rate": 4.94678974674072e-06, |
| "loss": 0.2162, |
| "step": 1815 |
| }, |
| { |
| "epoch": 1.1073661735549793, |
| "grad_norm": 0.23231721978554046, |
| "learning_rate": 4.941468953435024e-06, |
| "loss": 0.1975, |
| "step": 1816 |
| }, |
| { |
| "epoch": 1.1079762086319964, |
| "grad_norm": 0.22830426660121386, |
| "learning_rate": 4.936148226420133e-06, |
| "loss": 0.1927, |
| "step": 1817 |
| }, |
| { |
| "epoch": 1.1085862437090133, |
| "grad_norm": 0.21468125174644645, |
| "learning_rate": 4.930827571722167e-06, |
| "loss": 0.1869, |
| "step": 1818 |
| }, |
| { |
| "epoch": 1.10919627878603, |
| "grad_norm": 0.23170361384957827, |
| "learning_rate": 4.92550699536717e-06, |
| "loss": 0.2112, |
| "step": 1819 |
| }, |
| { |
| "epoch": 1.1098063138630472, |
| "grad_norm": 0.22508120264253859, |
| "learning_rate": 4.9201865033810934e-06, |
| "loss": 0.205, |
| "step": 1820 |
| }, |
| { |
| "epoch": 1.110416348940064, |
| "grad_norm": 0.2277555513898493, |
| "learning_rate": 4.914866101789793e-06, |
| "loss": 0.2276, |
| "step": 1821 |
| }, |
| { |
| "epoch": 1.111026384017081, |
| "grad_norm": 0.21958491724982818, |
| "learning_rate": 4.909545796619023e-06, |
| "loss": 0.2094, |
| "step": 1822 |
| }, |
| { |
| "epoch": 1.111636419094098, |
| "grad_norm": 0.2281758250224125, |
| "learning_rate": 4.90422559389443e-06, |
| "loss": 0.2064, |
| "step": 1823 |
| }, |
| { |
| "epoch": 1.1122464541711148, |
| "grad_norm": 0.22903591968619846, |
| "learning_rate": 4.8989054996415414e-06, |
| "loss": 0.1996, |
| "step": 1824 |
| }, |
| { |
| "epoch": 1.1128564892481319, |
| "grad_norm": 0.24375202304106894, |
| "learning_rate": 4.893585519885764e-06, |
| "loss": 0.2167, |
| "step": 1825 |
| }, |
| { |
| "epoch": 1.1134665243251487, |
| "grad_norm": 0.2414970821532803, |
| "learning_rate": 4.8882656606523736e-06, |
| "loss": 0.2258, |
| "step": 1826 |
| }, |
| { |
| "epoch": 1.1140765594021655, |
| "grad_norm": 0.25006632120661687, |
| "learning_rate": 4.88294592796651e-06, |
| "loss": 0.229, |
| "step": 1827 |
| }, |
| { |
| "epoch": 1.1146865944791826, |
| "grad_norm": 0.2482403602836149, |
| "learning_rate": 4.8776263278531705e-06, |
| "loss": 0.2231, |
| "step": 1828 |
| }, |
| { |
| "epoch": 1.1152966295561995, |
| "grad_norm": 0.22840236275307774, |
| "learning_rate": 4.8723068663372005e-06, |
| "loss": 0.2209, |
| "step": 1829 |
| }, |
| { |
| "epoch": 1.1159066646332163, |
| "grad_norm": 0.23156238236177693, |
| "learning_rate": 4.866987549443291e-06, |
| "loss": 0.2178, |
| "step": 1830 |
| }, |
| { |
| "epoch": 1.1165166997102334, |
| "grad_norm": 0.2479495313346978, |
| "learning_rate": 4.861668383195968e-06, |
| "loss": 0.2329, |
| "step": 1831 |
| }, |
| { |
| "epoch": 1.1171267347872502, |
| "grad_norm": 0.23715382066159454, |
| "learning_rate": 4.856349373619586e-06, |
| "loss": 0.1867, |
| "step": 1832 |
| }, |
| { |
| "epoch": 1.1177367698642673, |
| "grad_norm": 0.2175436696513431, |
| "learning_rate": 4.851030526738321e-06, |
| "loss": 0.201, |
| "step": 1833 |
| }, |
| { |
| "epoch": 1.1183468049412841, |
| "grad_norm": 0.22644845105026176, |
| "learning_rate": 4.8457118485761686e-06, |
| "loss": 0.1985, |
| "step": 1834 |
| }, |
| { |
| "epoch": 1.118956840018301, |
| "grad_norm": 0.23796566819781487, |
| "learning_rate": 4.84039334515693e-06, |
| "loss": 0.2155, |
| "step": 1835 |
| }, |
| { |
| "epoch": 1.119566875095318, |
| "grad_norm": 0.20778757880164975, |
| "learning_rate": 4.835075022504211e-06, |
| "loss": 0.1967, |
| "step": 1836 |
| }, |
| { |
| "epoch": 1.120176910172335, |
| "grad_norm": 0.2261588006945494, |
| "learning_rate": 4.829756886641408e-06, |
| "loss": 0.2077, |
| "step": 1837 |
| }, |
| { |
| "epoch": 1.1207869452493517, |
| "grad_norm": 0.23049171062789117, |
| "learning_rate": 4.82443894359171e-06, |
| "loss": 0.2111, |
| "step": 1838 |
| }, |
| { |
| "epoch": 1.1213969803263688, |
| "grad_norm": 0.2372692688283098, |
| "learning_rate": 4.819121199378091e-06, |
| "loss": 0.2097, |
| "step": 1839 |
| }, |
| { |
| "epoch": 1.1220070154033857, |
| "grad_norm": 0.2137083466863717, |
| "learning_rate": 4.81380366002329e-06, |
| "loss": 0.1939, |
| "step": 1840 |
| }, |
| { |
| "epoch": 1.1226170504804027, |
| "grad_norm": 0.23852721077040967, |
| "learning_rate": 4.808486331549824e-06, |
| "loss": 0.2352, |
| "step": 1841 |
| }, |
| { |
| "epoch": 1.1232270855574196, |
| "grad_norm": 0.22148119954075798, |
| "learning_rate": 4.803169219979963e-06, |
| "loss": 0.2183, |
| "step": 1842 |
| }, |
| { |
| "epoch": 1.1238371206344364, |
| "grad_norm": 0.22592558525037146, |
| "learning_rate": 4.797852331335739e-06, |
| "loss": 0.1963, |
| "step": 1843 |
| }, |
| { |
| "epoch": 1.1244471557114535, |
| "grad_norm": 0.2353605626350167, |
| "learning_rate": 4.7925356716389235e-06, |
| "loss": 0.2207, |
| "step": 1844 |
| }, |
| { |
| "epoch": 1.1250571907884703, |
| "grad_norm": 0.2297212807928882, |
| "learning_rate": 4.787219246911034e-06, |
| "loss": 0.2177, |
| "step": 1845 |
| }, |
| { |
| "epoch": 1.1256672258654872, |
| "grad_norm": 0.2176316617932621, |
| "learning_rate": 4.781903063173321e-06, |
| "loss": 0.1829, |
| "step": 1846 |
| }, |
| { |
| "epoch": 1.1262772609425042, |
| "grad_norm": 0.22958634224819682, |
| "learning_rate": 4.776587126446761e-06, |
| "loss": 0.2042, |
| "step": 1847 |
| }, |
| { |
| "epoch": 1.126887296019521, |
| "grad_norm": 0.23047338285747526, |
| "learning_rate": 4.771271442752053e-06, |
| "loss": 0.2153, |
| "step": 1848 |
| }, |
| { |
| "epoch": 1.1274973310965382, |
| "grad_norm": 0.24143457652149827, |
| "learning_rate": 4.765956018109607e-06, |
| "loss": 0.2189, |
| "step": 1849 |
| }, |
| { |
| "epoch": 1.128107366173555, |
| "grad_norm": 0.21811634648251788, |
| "learning_rate": 4.760640858539541e-06, |
| "loss": 0.2038, |
| "step": 1850 |
| }, |
| { |
| "epoch": 1.1287174012505719, |
| "grad_norm": 0.22559255230976394, |
| "learning_rate": 4.755325970061671e-06, |
| "loss": 0.2027, |
| "step": 1851 |
| }, |
| { |
| "epoch": 1.129327436327589, |
| "grad_norm": 0.22981316462642773, |
| "learning_rate": 4.750011358695508e-06, |
| "loss": 0.2089, |
| "step": 1852 |
| }, |
| { |
| "epoch": 1.1299374714046058, |
| "grad_norm": 0.22879068330224503, |
| "learning_rate": 4.744697030460248e-06, |
| "loss": 0.2044, |
| "step": 1853 |
| }, |
| { |
| "epoch": 1.1305475064816226, |
| "grad_norm": 0.2072099344556385, |
| "learning_rate": 4.739382991374767e-06, |
| "loss": 0.1959, |
| "step": 1854 |
| }, |
| { |
| "epoch": 1.1311575415586397, |
| "grad_norm": 0.23776541076645313, |
| "learning_rate": 4.734069247457613e-06, |
| "loss": 0.1942, |
| "step": 1855 |
| }, |
| { |
| "epoch": 1.1317675766356565, |
| "grad_norm": 0.23016709695257465, |
| "learning_rate": 4.728755804726998e-06, |
| "loss": 0.2249, |
| "step": 1856 |
| }, |
| { |
| "epoch": 1.1323776117126734, |
| "grad_norm": 0.22533949944245424, |
| "learning_rate": 4.7234426692007985e-06, |
| "loss": 0.2008, |
| "step": 1857 |
| }, |
| { |
| "epoch": 1.1329876467896904, |
| "grad_norm": 0.21749457157915061, |
| "learning_rate": 4.718129846896538e-06, |
| "loss": 0.1991, |
| "step": 1858 |
| }, |
| { |
| "epoch": 1.1335976818667073, |
| "grad_norm": 0.22038974282764345, |
| "learning_rate": 4.712817343831384e-06, |
| "loss": 0.1961, |
| "step": 1859 |
| }, |
| { |
| "epoch": 1.1342077169437244, |
| "grad_norm": 0.2386468105906248, |
| "learning_rate": 4.707505166022149e-06, |
| "loss": 0.2112, |
| "step": 1860 |
| }, |
| { |
| "epoch": 1.1348177520207412, |
| "grad_norm": 0.24323963813817923, |
| "learning_rate": 4.702193319485271e-06, |
| "loss": 0.2062, |
| "step": 1861 |
| }, |
| { |
| "epoch": 1.135427787097758, |
| "grad_norm": 0.24281596413325793, |
| "learning_rate": 4.696881810236815e-06, |
| "loss": 0.2237, |
| "step": 1862 |
| }, |
| { |
| "epoch": 1.1360378221747751, |
| "grad_norm": 0.24077278804132538, |
| "learning_rate": 4.691570644292464e-06, |
| "loss": 0.2183, |
| "step": 1863 |
| }, |
| { |
| "epoch": 1.136647857251792, |
| "grad_norm": 0.22606116032964318, |
| "learning_rate": 4.686259827667514e-06, |
| "loss": 0.2098, |
| "step": 1864 |
| }, |
| { |
| "epoch": 1.137257892328809, |
| "grad_norm": 0.22888289623334254, |
| "learning_rate": 4.680949366376858e-06, |
| "loss": 0.2041, |
| "step": 1865 |
| }, |
| { |
| "epoch": 1.1378679274058259, |
| "grad_norm": 0.23305603580105133, |
| "learning_rate": 4.6756392664349985e-06, |
| "loss": 0.2023, |
| "step": 1866 |
| }, |
| { |
| "epoch": 1.1384779624828427, |
| "grad_norm": 0.24371372538251743, |
| "learning_rate": 4.67032953385602e-06, |
| "loss": 0.2082, |
| "step": 1867 |
| }, |
| { |
| "epoch": 1.1390879975598598, |
| "grad_norm": 0.22396952559232805, |
| "learning_rate": 4.6650201746535926e-06, |
| "loss": 0.2068, |
| "step": 1868 |
| }, |
| { |
| "epoch": 1.1396980326368766, |
| "grad_norm": 0.22634464794146542, |
| "learning_rate": 4.659711194840964e-06, |
| "loss": 0.2028, |
| "step": 1869 |
| }, |
| { |
| "epoch": 1.1403080677138935, |
| "grad_norm": 0.2240492344171807, |
| "learning_rate": 4.654402600430955e-06, |
| "loss": 0.2071, |
| "step": 1870 |
| }, |
| { |
| "epoch": 1.1409181027909105, |
| "grad_norm": 0.23557956266916885, |
| "learning_rate": 4.649094397435944e-06, |
| "loss": 0.2007, |
| "step": 1871 |
| }, |
| { |
| "epoch": 1.1415281378679274, |
| "grad_norm": 0.22438100866093574, |
| "learning_rate": 4.643786591867871e-06, |
| "loss": 0.209, |
| "step": 1872 |
| }, |
| { |
| "epoch": 1.1421381729449442, |
| "grad_norm": 0.24906239130894264, |
| "learning_rate": 4.638479189738224e-06, |
| "loss": 0.2175, |
| "step": 1873 |
| }, |
| { |
| "epoch": 1.1427482080219613, |
| "grad_norm": 0.24332170113088764, |
| "learning_rate": 4.633172197058034e-06, |
| "loss": 0.2238, |
| "step": 1874 |
| }, |
| { |
| "epoch": 1.1433582430989782, |
| "grad_norm": 0.24344778268910833, |
| "learning_rate": 4.6278656198378665e-06, |
| "loss": 0.2241, |
| "step": 1875 |
| }, |
| { |
| "epoch": 1.1439682781759952, |
| "grad_norm": 0.2383508238526649, |
| "learning_rate": 4.622559464087824e-06, |
| "loss": 0.2297, |
| "step": 1876 |
| }, |
| { |
| "epoch": 1.144578313253012, |
| "grad_norm": 0.22412248975169005, |
| "learning_rate": 4.617253735817522e-06, |
| "loss": 0.2078, |
| "step": 1877 |
| }, |
| { |
| "epoch": 1.145188348330029, |
| "grad_norm": 0.23701589723417668, |
| "learning_rate": 4.611948441036098e-06, |
| "loss": 0.2321, |
| "step": 1878 |
| }, |
| { |
| "epoch": 1.145798383407046, |
| "grad_norm": 0.2340242843088781, |
| "learning_rate": 4.606643585752195e-06, |
| "loss": 0.2103, |
| "step": 1879 |
| }, |
| { |
| "epoch": 1.1464084184840628, |
| "grad_norm": 0.23920144889311484, |
| "learning_rate": 4.6013391759739615e-06, |
| "loss": 0.2456, |
| "step": 1880 |
| }, |
| { |
| "epoch": 1.1470184535610797, |
| "grad_norm": 0.21459534836538638, |
| "learning_rate": 4.596035217709039e-06, |
| "loss": 0.1895, |
| "step": 1881 |
| }, |
| { |
| "epoch": 1.1476284886380967, |
| "grad_norm": 0.22716162499913228, |
| "learning_rate": 4.590731716964559e-06, |
| "loss": 0.2271, |
| "step": 1882 |
| }, |
| { |
| "epoch": 1.1482385237151136, |
| "grad_norm": 0.244394988171428, |
| "learning_rate": 4.585428679747133e-06, |
| "loss": 0.2111, |
| "step": 1883 |
| }, |
| { |
| "epoch": 1.1488485587921304, |
| "grad_norm": 0.21851983087595445, |
| "learning_rate": 4.580126112062847e-06, |
| "loss": 0.192, |
| "step": 1884 |
| }, |
| { |
| "epoch": 1.1494585938691475, |
| "grad_norm": 0.23185076923355083, |
| "learning_rate": 4.574824019917262e-06, |
| "loss": 0.2131, |
| "step": 1885 |
| }, |
| { |
| "epoch": 1.1500686289461644, |
| "grad_norm": 0.22895978103203585, |
| "learning_rate": 4.569522409315392e-06, |
| "loss": 0.2078, |
| "step": 1886 |
| }, |
| { |
| "epoch": 1.1506786640231814, |
| "grad_norm": 0.23953049315851418, |
| "learning_rate": 4.564221286261709e-06, |
| "loss": 0.2145, |
| "step": 1887 |
| }, |
| { |
| "epoch": 1.1512886991001983, |
| "grad_norm": 0.22458440085000345, |
| "learning_rate": 4.558920656760135e-06, |
| "loss": 0.2118, |
| "step": 1888 |
| }, |
| { |
| "epoch": 1.1518987341772151, |
| "grad_norm": 0.22196229519951427, |
| "learning_rate": 4.553620526814029e-06, |
| "loss": 0.2158, |
| "step": 1889 |
| }, |
| { |
| "epoch": 1.1525087692542322, |
| "grad_norm": 0.2926714766657743, |
| "learning_rate": 4.548320902426186e-06, |
| "loss": 0.2081, |
| "step": 1890 |
| }, |
| { |
| "epoch": 1.153118804331249, |
| "grad_norm": 0.2637006579000516, |
| "learning_rate": 4.543021789598831e-06, |
| "loss": 0.2046, |
| "step": 1891 |
| }, |
| { |
| "epoch": 1.153728839408266, |
| "grad_norm": 0.2303464141889435, |
| "learning_rate": 4.537723194333603e-06, |
| "loss": 0.2096, |
| "step": 1892 |
| }, |
| { |
| "epoch": 1.154338874485283, |
| "grad_norm": 0.22616168232534192, |
| "learning_rate": 4.532425122631559e-06, |
| "loss": 0.2195, |
| "step": 1893 |
| }, |
| { |
| "epoch": 1.1549489095622998, |
| "grad_norm": 0.22958441424908974, |
| "learning_rate": 4.527127580493167e-06, |
| "loss": 0.2152, |
| "step": 1894 |
| }, |
| { |
| "epoch": 1.1555589446393169, |
| "grad_norm": 0.23206621665711868, |
| "learning_rate": 4.521830573918289e-06, |
| "loss": 0.2056, |
| "step": 1895 |
| }, |
| { |
| "epoch": 1.1561689797163337, |
| "grad_norm": 0.24162175497638366, |
| "learning_rate": 4.5165341089061825e-06, |
| "loss": 0.2112, |
| "step": 1896 |
| }, |
| { |
| "epoch": 1.1567790147933505, |
| "grad_norm": 0.2236351442320407, |
| "learning_rate": 4.511238191455491e-06, |
| "loss": 0.1935, |
| "step": 1897 |
| }, |
| { |
| "epoch": 1.1573890498703676, |
| "grad_norm": 0.2454020762399105, |
| "learning_rate": 4.505942827564242e-06, |
| "loss": 0.2265, |
| "step": 1898 |
| }, |
| { |
| "epoch": 1.1579990849473845, |
| "grad_norm": 0.26020678242989564, |
| "learning_rate": 4.500648023229828e-06, |
| "loss": 0.211, |
| "step": 1899 |
| }, |
| { |
| "epoch": 1.1586091200244013, |
| "grad_norm": 0.22806777598404518, |
| "learning_rate": 4.495353784449015e-06, |
| "loss": 0.1971, |
| "step": 1900 |
| }, |
| { |
| "epoch": 1.1592191551014184, |
| "grad_norm": 0.22375142681862864, |
| "learning_rate": 4.490060117217925e-06, |
| "loss": 0.2164, |
| "step": 1901 |
| }, |
| { |
| "epoch": 1.1598291901784352, |
| "grad_norm": 0.23047793015134796, |
| "learning_rate": 4.484767027532032e-06, |
| "loss": 0.2272, |
| "step": 1902 |
| }, |
| { |
| "epoch": 1.1604392252554523, |
| "grad_norm": 0.22402678078073376, |
| "learning_rate": 4.479474521386161e-06, |
| "loss": 0.2024, |
| "step": 1903 |
| }, |
| { |
| "epoch": 1.1610492603324691, |
| "grad_norm": 0.24312729205941291, |
| "learning_rate": 4.474182604774471e-06, |
| "loss": 0.2037, |
| "step": 1904 |
| }, |
| { |
| "epoch": 1.161659295409486, |
| "grad_norm": 0.22900584787625933, |
| "learning_rate": 4.468891283690454e-06, |
| "loss": 0.2062, |
| "step": 1905 |
| }, |
| { |
| "epoch": 1.162269330486503, |
| "grad_norm": 0.23554230639630583, |
| "learning_rate": 4.4636005641269294e-06, |
| "loss": 0.2238, |
| "step": 1906 |
| }, |
| { |
| "epoch": 1.16287936556352, |
| "grad_norm": 0.24073626227280503, |
| "learning_rate": 4.458310452076034e-06, |
| "loss": 0.2442, |
| "step": 1907 |
| }, |
| { |
| "epoch": 1.1634894006405367, |
| "grad_norm": 0.2356536112560819, |
| "learning_rate": 4.453020953529217e-06, |
| "loss": 0.1972, |
| "step": 1908 |
| }, |
| { |
| "epoch": 1.1640994357175538, |
| "grad_norm": 0.24903944162427105, |
| "learning_rate": 4.447732074477233e-06, |
| "loss": 0.2303, |
| "step": 1909 |
| }, |
| { |
| "epoch": 1.1647094707945707, |
| "grad_norm": 0.24095256035225399, |
| "learning_rate": 4.442443820910133e-06, |
| "loss": 0.2214, |
| "step": 1910 |
| }, |
| { |
| "epoch": 1.1653195058715875, |
| "grad_norm": 0.2390760009355516, |
| "learning_rate": 4.437156198817262e-06, |
| "loss": 0.2072, |
| "step": 1911 |
| }, |
| { |
| "epoch": 1.1659295409486046, |
| "grad_norm": 0.24010310251085357, |
| "learning_rate": 4.431869214187246e-06, |
| "loss": 0.2162, |
| "step": 1912 |
| }, |
| { |
| "epoch": 1.1665395760256214, |
| "grad_norm": 0.2300662707483189, |
| "learning_rate": 4.426582873007999e-06, |
| "loss": 0.2002, |
| "step": 1913 |
| }, |
| { |
| "epoch": 1.1671496111026385, |
| "grad_norm": 0.22383983720254905, |
| "learning_rate": 4.421297181266694e-06, |
| "loss": 0.1819, |
| "step": 1914 |
| }, |
| { |
| "epoch": 1.1677596461796553, |
| "grad_norm": 0.2414352617015131, |
| "learning_rate": 4.4160121449497735e-06, |
| "loss": 0.2185, |
| "step": 1915 |
| }, |
| { |
| "epoch": 1.1683696812566722, |
| "grad_norm": 0.2539875282741942, |
| "learning_rate": 4.410727770042941e-06, |
| "loss": 0.2226, |
| "step": 1916 |
| }, |
| { |
| "epoch": 1.1689797163336892, |
| "grad_norm": 0.24420213569141258, |
| "learning_rate": 4.405444062531145e-06, |
| "loss": 0.2001, |
| "step": 1917 |
| }, |
| { |
| "epoch": 1.169589751410706, |
| "grad_norm": 0.23888115158233167, |
| "learning_rate": 4.400161028398583e-06, |
| "loss": 0.2013, |
| "step": 1918 |
| }, |
| { |
| "epoch": 1.1701997864877232, |
| "grad_norm": 0.24835651886999338, |
| "learning_rate": 4.3948786736286866e-06, |
| "loss": 0.199, |
| "step": 1919 |
| }, |
| { |
| "epoch": 1.17080982156474, |
| "grad_norm": 0.24692728404753747, |
| "learning_rate": 4.389597004204119e-06, |
| "loss": 0.2233, |
| "step": 1920 |
| }, |
| { |
| "epoch": 1.1714198566417569, |
| "grad_norm": 0.23560264343648826, |
| "learning_rate": 4.384316026106766e-06, |
| "loss": 0.2128, |
| "step": 1921 |
| }, |
| { |
| "epoch": 1.172029891718774, |
| "grad_norm": 0.2265026408766131, |
| "learning_rate": 4.379035745317734e-06, |
| "loss": 0.1931, |
| "step": 1922 |
| }, |
| { |
| "epoch": 1.1726399267957908, |
| "grad_norm": 0.24621590868324938, |
| "learning_rate": 4.373756167817338e-06, |
| "loss": 0.2067, |
| "step": 1923 |
| }, |
| { |
| "epoch": 1.1732499618728076, |
| "grad_norm": 0.24098881344443027, |
| "learning_rate": 4.368477299585094e-06, |
| "loss": 0.2099, |
| "step": 1924 |
| }, |
| { |
| "epoch": 1.1738599969498247, |
| "grad_norm": 0.2268182569936142, |
| "learning_rate": 4.363199146599717e-06, |
| "loss": 0.1841, |
| "step": 1925 |
| }, |
| { |
| "epoch": 1.1744700320268415, |
| "grad_norm": 0.23457894427475676, |
| "learning_rate": 4.3579217148391115e-06, |
| "loss": 0.2074, |
| "step": 1926 |
| }, |
| { |
| "epoch": 1.1750800671038584, |
| "grad_norm": 0.2538176166214765, |
| "learning_rate": 4.3526450102803654e-06, |
| "loss": 0.2043, |
| "step": 1927 |
| }, |
| { |
| "epoch": 1.1756901021808754, |
| "grad_norm": 0.298440743554529, |
| "learning_rate": 4.347369038899744e-06, |
| "loss": 0.2217, |
| "step": 1928 |
| }, |
| { |
| "epoch": 1.1763001372578923, |
| "grad_norm": 0.22397262376763313, |
| "learning_rate": 4.342093806672678e-06, |
| "loss": 0.2075, |
| "step": 1929 |
| }, |
| { |
| "epoch": 1.1769101723349094, |
| "grad_norm": 0.2497064497376091, |
| "learning_rate": 4.336819319573764e-06, |
| "loss": 0.2336, |
| "step": 1930 |
| }, |
| { |
| "epoch": 1.1775202074119262, |
| "grad_norm": 0.2171700331359755, |
| "learning_rate": 4.331545583576758e-06, |
| "loss": 0.1868, |
| "step": 1931 |
| }, |
| { |
| "epoch": 1.178130242488943, |
| "grad_norm": 0.2381864903430019, |
| "learning_rate": 4.32627260465456e-06, |
| "loss": 0.1959, |
| "step": 1932 |
| }, |
| { |
| "epoch": 1.1787402775659601, |
| "grad_norm": 0.2420621944986559, |
| "learning_rate": 4.321000388779214e-06, |
| "loss": 0.1993, |
| "step": 1933 |
| }, |
| { |
| "epoch": 1.179350312642977, |
| "grad_norm": 0.23433046760619705, |
| "learning_rate": 4.3157289419219e-06, |
| "loss": 0.2066, |
| "step": 1934 |
| }, |
| { |
| "epoch": 1.1799603477199938, |
| "grad_norm": 0.24468272597979573, |
| "learning_rate": 4.3104582700529295e-06, |
| "loss": 0.2074, |
| "step": 1935 |
| }, |
| { |
| "epoch": 1.1805703827970109, |
| "grad_norm": 0.23079154266496227, |
| "learning_rate": 4.3051883791417325e-06, |
| "loss": 0.2132, |
| "step": 1936 |
| }, |
| { |
| "epoch": 1.1811804178740277, |
| "grad_norm": 0.22032298652505528, |
| "learning_rate": 4.299919275156857e-06, |
| "loss": 0.2085, |
| "step": 1937 |
| }, |
| { |
| "epoch": 1.1817904529510446, |
| "grad_norm": 0.23205968573496338, |
| "learning_rate": 4.294650964065956e-06, |
| "loss": 0.2225, |
| "step": 1938 |
| }, |
| { |
| "epoch": 1.1824004880280616, |
| "grad_norm": 0.24676759799484482, |
| "learning_rate": 4.289383451835789e-06, |
| "loss": 0.2192, |
| "step": 1939 |
| }, |
| { |
| "epoch": 1.1830105231050785, |
| "grad_norm": 0.22110343486421205, |
| "learning_rate": 4.28411674443221e-06, |
| "loss": 0.2005, |
| "step": 1940 |
| }, |
| { |
| "epoch": 1.1836205581820956, |
| "grad_norm": 0.23647728822340291, |
| "learning_rate": 4.278850847820161e-06, |
| "loss": 0.2132, |
| "step": 1941 |
| }, |
| { |
| "epoch": 1.1842305932591124, |
| "grad_norm": 0.23142771149859084, |
| "learning_rate": 4.273585767963662e-06, |
| "loss": 0.2076, |
| "step": 1942 |
| }, |
| { |
| "epoch": 1.1848406283361292, |
| "grad_norm": 0.22278997597375186, |
| "learning_rate": 4.2683215108258145e-06, |
| "loss": 0.1969, |
| "step": 1943 |
| }, |
| { |
| "epoch": 1.1854506634131463, |
| "grad_norm": 0.2264849055331267, |
| "learning_rate": 4.263058082368785e-06, |
| "loss": 0.2093, |
| "step": 1944 |
| }, |
| { |
| "epoch": 1.1860606984901632, |
| "grad_norm": 0.23289169571659174, |
| "learning_rate": 4.2577954885537985e-06, |
| "loss": 0.2299, |
| "step": 1945 |
| }, |
| { |
| "epoch": 1.1866707335671802, |
| "grad_norm": 0.2319473383776966, |
| "learning_rate": 4.25253373534114e-06, |
| "loss": 0.2295, |
| "step": 1946 |
| }, |
| { |
| "epoch": 1.187280768644197, |
| "grad_norm": 0.24206179694564797, |
| "learning_rate": 4.247272828690138e-06, |
| "loss": 0.1919, |
| "step": 1947 |
| }, |
| { |
| "epoch": 1.187890803721214, |
| "grad_norm": 0.22777022144636788, |
| "learning_rate": 4.242012774559164e-06, |
| "loss": 0.1958, |
| "step": 1948 |
| }, |
| { |
| "epoch": 1.188500838798231, |
| "grad_norm": 0.24028328921186182, |
| "learning_rate": 4.236753578905627e-06, |
| "loss": 0.2146, |
| "step": 1949 |
| }, |
| { |
| "epoch": 1.1891108738752478, |
| "grad_norm": 0.23051792511004326, |
| "learning_rate": 4.231495247685961e-06, |
| "loss": 0.2044, |
| "step": 1950 |
| }, |
| { |
| "epoch": 1.1897209089522647, |
| "grad_norm": 0.22677821945909582, |
| "learning_rate": 4.2262377868556176e-06, |
| "loss": 0.196, |
| "step": 1951 |
| }, |
| { |
| "epoch": 1.1903309440292817, |
| "grad_norm": 0.23956247319151058, |
| "learning_rate": 4.220981202369067e-06, |
| "loss": 0.2218, |
| "step": 1952 |
| }, |
| { |
| "epoch": 1.1909409791062986, |
| "grad_norm": 0.23056731679274411, |
| "learning_rate": 4.215725500179788e-06, |
| "loss": 0.1964, |
| "step": 1953 |
| }, |
| { |
| "epoch": 1.1915510141833154, |
| "grad_norm": 0.22290865769000323, |
| "learning_rate": 4.210470686240255e-06, |
| "loss": 0.2004, |
| "step": 1954 |
| }, |
| { |
| "epoch": 1.1921610492603325, |
| "grad_norm": 0.2370506789467733, |
| "learning_rate": 4.205216766501941e-06, |
| "loss": 0.2192, |
| "step": 1955 |
| }, |
| { |
| "epoch": 1.1927710843373494, |
| "grad_norm": 0.24199429030566116, |
| "learning_rate": 4.199963746915304e-06, |
| "loss": 0.2111, |
| "step": 1956 |
| }, |
| { |
| "epoch": 1.1933811194143664, |
| "grad_norm": 0.24928723046464263, |
| "learning_rate": 4.194711633429782e-06, |
| "loss": 0.2232, |
| "step": 1957 |
| }, |
| { |
| "epoch": 1.1939911544913833, |
| "grad_norm": 0.2261523161741276, |
| "learning_rate": 4.189460431993788e-06, |
| "loss": 0.2226, |
| "step": 1958 |
| }, |
| { |
| "epoch": 1.1946011895684001, |
| "grad_norm": 0.21711436235680392, |
| "learning_rate": 4.184210148554704e-06, |
| "loss": 0.2173, |
| "step": 1959 |
| }, |
| { |
| "epoch": 1.1952112246454172, |
| "grad_norm": 0.2335041594132127, |
| "learning_rate": 4.178960789058869e-06, |
| "loss": 0.2259, |
| "step": 1960 |
| }, |
| { |
| "epoch": 1.195821259722434, |
| "grad_norm": 0.22420508190499794, |
| "learning_rate": 4.173712359451576e-06, |
| "loss": 0.2126, |
| "step": 1961 |
| }, |
| { |
| "epoch": 1.1964312947994509, |
| "grad_norm": 0.21841033430406878, |
| "learning_rate": 4.1684648656770655e-06, |
| "loss": 0.2166, |
| "step": 1962 |
| }, |
| { |
| "epoch": 1.197041329876468, |
| "grad_norm": 0.21237709660456103, |
| "learning_rate": 4.16321831367852e-06, |
| "loss": 0.1958, |
| "step": 1963 |
| }, |
| { |
| "epoch": 1.1976513649534848, |
| "grad_norm": 0.2427307708445763, |
| "learning_rate": 4.157972709398051e-06, |
| "loss": 0.2288, |
| "step": 1964 |
| }, |
| { |
| "epoch": 1.1982614000305016, |
| "grad_norm": 0.21122796020184387, |
| "learning_rate": 4.152728058776701e-06, |
| "loss": 0.2015, |
| "step": 1965 |
| }, |
| { |
| "epoch": 1.1988714351075187, |
| "grad_norm": 0.23690829355786672, |
| "learning_rate": 4.14748436775443e-06, |
| "loss": 0.2272, |
| "step": 1966 |
| }, |
| { |
| "epoch": 1.1994814701845355, |
| "grad_norm": 0.22601930005669627, |
| "learning_rate": 4.142241642270109e-06, |
| "loss": 0.2161, |
| "step": 1967 |
| }, |
| { |
| "epoch": 1.2000915052615526, |
| "grad_norm": 0.21905788470891707, |
| "learning_rate": 4.136999888261522e-06, |
| "loss": 0.207, |
| "step": 1968 |
| }, |
| { |
| "epoch": 1.2007015403385695, |
| "grad_norm": 0.23098794767319236, |
| "learning_rate": 4.131759111665349e-06, |
| "loss": 0.2265, |
| "step": 1969 |
| }, |
| { |
| "epoch": 1.2013115754155863, |
| "grad_norm": 0.24001379202195916, |
| "learning_rate": 4.1265193184171605e-06, |
| "loss": 0.2123, |
| "step": 1970 |
| }, |
| { |
| "epoch": 1.2019216104926034, |
| "grad_norm": 0.22602298697793693, |
| "learning_rate": 4.121280514451417e-06, |
| "loss": 0.2112, |
| "step": 1971 |
| }, |
| { |
| "epoch": 1.2025316455696202, |
| "grad_norm": 0.22965358583286585, |
| "learning_rate": 4.116042705701457e-06, |
| "loss": 0.2158, |
| "step": 1972 |
| }, |
| { |
| "epoch": 1.2031416806466373, |
| "grad_norm": 0.22836624548685788, |
| "learning_rate": 4.110805898099492e-06, |
| "loss": 0.207, |
| "step": 1973 |
| }, |
| { |
| "epoch": 1.2037517157236541, |
| "grad_norm": 0.20894006566169948, |
| "learning_rate": 4.105570097576601e-06, |
| "loss": 0.1953, |
| "step": 1974 |
| }, |
| { |
| "epoch": 1.204361750800671, |
| "grad_norm": 0.2333801850349286, |
| "learning_rate": 4.100335310062719e-06, |
| "loss": 0.2217, |
| "step": 1975 |
| }, |
| { |
| "epoch": 1.204971785877688, |
| "grad_norm": 0.2314869312834669, |
| "learning_rate": 4.095101541486636e-06, |
| "loss": 0.2302, |
| "step": 1976 |
| }, |
| { |
| "epoch": 1.205581820954705, |
| "grad_norm": 0.2178532501103331, |
| "learning_rate": 4.0898687977759895e-06, |
| "loss": 0.1874, |
| "step": 1977 |
| }, |
| { |
| "epoch": 1.2061918560317217, |
| "grad_norm": 0.22091585188727192, |
| "learning_rate": 4.084637084857254e-06, |
| "loss": 0.2074, |
| "step": 1978 |
| }, |
| { |
| "epoch": 1.2068018911087388, |
| "grad_norm": 0.21637361310776265, |
| "learning_rate": 4.079406408655737e-06, |
| "loss": 0.1906, |
| "step": 1979 |
| }, |
| { |
| "epoch": 1.2074119261857557, |
| "grad_norm": 0.22093702909221938, |
| "learning_rate": 4.0741767750955724e-06, |
| "loss": 0.2205, |
| "step": 1980 |
| }, |
| { |
| "epoch": 1.2080219612627725, |
| "grad_norm": 0.2212934988688623, |
| "learning_rate": 4.068948190099711e-06, |
| "loss": 0.2322, |
| "step": 1981 |
| }, |
| { |
| "epoch": 1.2086319963397896, |
| "grad_norm": 0.2365991900597681, |
| "learning_rate": 4.0637206595899206e-06, |
| "loss": 0.2376, |
| "step": 1982 |
| }, |
| { |
| "epoch": 1.2092420314168064, |
| "grad_norm": 0.23556079186727855, |
| "learning_rate": 4.058494189486769e-06, |
| "loss": 0.198, |
| "step": 1983 |
| }, |
| { |
| "epoch": 1.2098520664938235, |
| "grad_norm": 0.21780322654855477, |
| "learning_rate": 4.0532687857096285e-06, |
| "loss": 0.2245, |
| "step": 1984 |
| }, |
| { |
| "epoch": 1.2104621015708403, |
| "grad_norm": 0.2287696706059261, |
| "learning_rate": 4.048044454176658e-06, |
| "loss": 0.209, |
| "step": 1985 |
| }, |
| { |
| "epoch": 1.2110721366478572, |
| "grad_norm": 0.2610764735198003, |
| "learning_rate": 4.042821200804809e-06, |
| "loss": 0.2427, |
| "step": 1986 |
| }, |
| { |
| "epoch": 1.2116821717248742, |
| "grad_norm": 0.22708291802359176, |
| "learning_rate": 4.037599031509806e-06, |
| "loss": 0.2005, |
| "step": 1987 |
| }, |
| { |
| "epoch": 1.212292206801891, |
| "grad_norm": 0.2415172990342089, |
| "learning_rate": 4.032377952206148e-06, |
| "loss": 0.2256, |
| "step": 1988 |
| }, |
| { |
| "epoch": 1.2129022418789082, |
| "grad_norm": 0.24699658239416358, |
| "learning_rate": 4.0271579688071e-06, |
| "loss": 0.2141, |
| "step": 1989 |
| }, |
| { |
| "epoch": 1.213512276955925, |
| "grad_norm": 0.2399879409915332, |
| "learning_rate": 4.021939087224682e-06, |
| "loss": 0.2115, |
| "step": 1990 |
| }, |
| { |
| "epoch": 1.2141223120329419, |
| "grad_norm": 0.23415516645716586, |
| "learning_rate": 4.016721313369674e-06, |
| "loss": 0.2271, |
| "step": 1991 |
| }, |
| { |
| "epoch": 1.2147323471099587, |
| "grad_norm": 0.22806628490981498, |
| "learning_rate": 4.011504653151593e-06, |
| "loss": 0.2204, |
| "step": 1992 |
| }, |
| { |
| "epoch": 1.2153423821869758, |
| "grad_norm": 0.2595253670667039, |
| "learning_rate": 4.0062891124787e-06, |
| "loss": 0.2427, |
| "step": 1993 |
| }, |
| { |
| "epoch": 1.2159524172639926, |
| "grad_norm": 0.226343672771304, |
| "learning_rate": 4.0010746972579865e-06, |
| "loss": 0.2081, |
| "step": 1994 |
| }, |
| { |
| "epoch": 1.2165624523410097, |
| "grad_norm": 0.22187314948561185, |
| "learning_rate": 3.995861413395164e-06, |
| "loss": 0.2095, |
| "step": 1995 |
| }, |
| { |
| "epoch": 1.2171724874180265, |
| "grad_norm": 0.2408609051467771, |
| "learning_rate": 3.990649266794676e-06, |
| "loss": 0.2048, |
| "step": 1996 |
| }, |
| { |
| "epoch": 1.2177825224950434, |
| "grad_norm": 0.23466609594422677, |
| "learning_rate": 3.985438263359667e-06, |
| "loss": 0.2155, |
| "step": 1997 |
| }, |
| { |
| "epoch": 1.2183925575720604, |
| "grad_norm": 0.2194120357344308, |
| "learning_rate": 3.9802284089919876e-06, |
| "loss": 0.1925, |
| "step": 1998 |
| }, |
| { |
| "epoch": 1.2190025926490773, |
| "grad_norm": 0.22978200474534835, |
| "learning_rate": 3.975019709592189e-06, |
| "loss": 0.2111, |
| "step": 1999 |
| }, |
| { |
| "epoch": 1.2196126277260944, |
| "grad_norm": 0.22765530069297446, |
| "learning_rate": 3.969812171059516e-06, |
| "loss": 0.1976, |
| "step": 2000 |
| }, |
| { |
| "epoch": 1.2196126277260944, |
| "eval_loss": 0.22462303936481476, |
| "eval_runtime": 265.1969, |
| "eval_samples_per_second": 4.035, |
| "eval_steps_per_second": 0.128, |
| "step": 2000 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 3280, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 1000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2422457185075200.0, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |