andrew-healey committed on
Commit 1d70f2b · verified · 1 Parent(s): 3d3c68e

Upload folder using huggingface_hub

12_head_baseline_lr_6e-4/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/repro_selective_pattern_rankings/12_head_baseline_lr_6e-4", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "repro_selective_pattern_rankings", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 64, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0006, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "12_head_baseline_lr_6e-4"}
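The args.json above is a flat key-value run configuration, so it can be read directly with the standard library. A minimal sketch; the gradient-accumulation line is an assumption (it holds if total_batch_size is counted in tokens, as in common nanoGPT-style trainers, and is not documented in this commit):

```python
import json

# Load the run configuration uploaded above.
with open("12_head_baseline_lr_6e-4/args.json") as f:
    args = json.load(f)

# Headline hyperparameters of this run.
print(args["attention_kind"], args["n_heads"], args["n_embd"])  # selective 12 264
print(args["max_lr"], args["max_steps"])                        # 0.0006 4375

# Assumption: if total_batch_size counts tokens, the implied
# gradient-accumulation factor is 131072 / (64 * 256) = 8.
grad_accum_steps = args["total_batch_size"] // (args["batch_size"] * args["seq_len"])
print(grad_accum_steps)  # 8
```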
12_head_baseline_lr_6e-4/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ec6682852ed60a9d42cb4047300f65a04f87328c0ad7a4516be84d11b28f216
+ size 964
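The three .pt artifacts in this commit are stored as Git LFS pointers like the one above: a spec version line, a sha256 oid, and the object size in bytes. After fetching the real objects (for example with `git lfs pull`), a download can be checked against its pointer. A minimal sketch; `verify_lfs_object` is a hypothetical helper, not part of this repo:

```python
import hashlib
import os

def verify_lfs_object(path: str, expected_sha256: str, expected_size: int) -> bool:
    """Check a downloaded file against the oid/size from its LFS pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    sha = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream 1 MiB at a time
            sha.update(chunk)
    return sha.hexdigest() == expected_sha256

# Values taken from the pointer above.
assert verify_lfs_object(
    "12_head_baseline_lr_6e-4/dataloader_04374.pt",
    "8ec6682852ed60a9d42cb4047300f65a04f87328c0ad7a4516be84d11b28f216",
    964,
)
```

Streaming the hash matters here: the optimizer checkpoint below is close to 1 GB, so reading it in a single call would be wasteful.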
12_head_baseline_lr_6e-4/log2.txt ADDED
@@ -0,0 +1,529 @@
+ max_steps: 4375
+ 0 val loss 10.9275
+ 0 val perplexity 55688.3086
+ 0 train 10.927717 (lr=8.3916e-07) (hash(x)=93356070)
+ 10 train 10.157005 (lr=9.2308e-06) (hash(x)=91253010)
+ 20 train 9.639227 (lr=1.7622e-05) (hash(x)=74934453)
+ 30 train 9.478985 (lr=2.6014e-05) (hash(x)=79278034)
+ 40 train 9.202698 (lr=3.4406e-05) (hash(x)=80107892)
+ 50 train 8.920856 (lr=4.2797e-05) (hash(x)=70745428)
+ 60 train 8.680676 (lr=5.1189e-05) (hash(x)=80077589)
+ 70 train 8.218871 (lr=5.9580e-05) (hash(x)=76213766)
+ 80 train 7.973695 (lr=6.7972e-05) (hash(x)=83218328)
+ 90 train 7.729426 (lr=7.6364e-05) (hash(x)=74855845)
+ 100 val loss 7.5897
+ 100 val perplexity 1977.6705
+ 100 train 7.465366 (lr=8.4755e-05) (hash(x)=82814902)
+ 110 train 7.380744 (lr=9.3147e-05) (hash(x)=84907741)
+ 120 train 7.277765 (lr=1.0154e-04) (hash(x)=82613223)
+ 130 train 7.125362 (lr=1.0993e-04) (hash(x)=83540876)
+ 140 train 7.101282 (lr=1.1832e-04) (hash(x)=75095216)
+ 150 train 7.149079 (lr=1.2671e-04) (hash(x)=97190944)
+ 160 train 7.014097 (lr=1.3510e-04) (hash(x)=82117809)
+ 170 train 6.897202 (lr=1.4350e-04) (hash(x)=70514724)
+ 180 train 6.814301 (lr=1.5189e-04) (hash(x)=81029624)
+ 190 train 6.696559 (lr=1.6028e-04) (hash(x)=78996841)
+ 200 val loss 6.6417
+ 200 val perplexity 766.3725
+ 200 train 6.645527 (lr=1.6867e-04) (hash(x)=79845097)
+ 210 train 6.501446 (lr=1.7706e-04) (hash(x)=78997683)
+ 220 train 6.390601 (lr=1.8545e-04) (hash(x)=74895865)
+ 230 train 6.567307 (lr=1.9385e-04) (hash(x)=80933276)
+ 240 train 6.268052 (lr=2.0224e-04) (hash(x)=77664606)
+ 250 train 6.276261 (lr=2.1063e-04) (hash(x)=75903930)
+ 260 train 6.179594 (lr=2.1902e-04) (hash(x)=82996853)
+ 270 train 6.113348 (lr=2.2741e-04) (hash(x)=73269568)
+ 280 train 6.050037 (lr=2.3580e-04) (hash(x)=82906427)
+ 290 train 5.968565 (lr=2.4420e-04) (hash(x)=71797895)
+ 300 val loss 6.1027
+ 300 val perplexity 447.0765
+ 300 train 5.830674 (lr=2.5259e-04) (hash(x)=85232249)
+ 310 train 5.762503 (lr=2.6098e-04) (hash(x)=75252489)
+ 320 train 5.865934 (lr=2.6937e-04) (hash(x)=74394644)
+ 330 train 5.930887 (lr=2.7776e-04) (hash(x)=82772910)
+ 340 train 5.976114 (lr=2.8615e-04) (hash(x)=81627464)
+ 350 train 5.956862 (lr=2.9455e-04) (hash(x)=84678053)
+ 360 train 5.903929 (lr=3.0294e-04) (hash(x)=81884128)
+ 370 train 5.883206 (lr=3.1133e-04) (hash(x)=76379242)
+ 380 train 5.658519 (lr=3.1972e-04) (hash(x)=98182875)
+ 390 train 5.666385 (lr=3.2811e-04) (hash(x)=79710436)
+ 400 val loss 5.7099
+ 400 val perplexity 301.8305
+ 400 train 5.665086 (lr=3.3650e-04) (hash(x)=79841071)
+ 410 train 5.577646 (lr=3.4490e-04) (hash(x)=75844151)
+ 420 train 5.574399 (lr=3.5329e-04) (hash(x)=73125036)
+ 430 train 5.618525 (lr=3.6168e-04) (hash(x)=84214858)
+ 440 train 5.507903 (lr=3.7007e-04) (hash(x)=80456994)
+ 450 train 5.469387 (lr=3.7846e-04) (hash(x)=76735962)
+ 460 train 5.405846 (lr=3.8685e-04) (hash(x)=81845446)
+ 470 train 5.221576 (lr=3.9524e-04) (hash(x)=76094689)
+ 480 train 5.342197 (lr=4.0364e-04) (hash(x)=83806686)
+ 490 train 5.167534 (lr=4.1203e-04) (hash(x)=84690227)
+ 500 val loss 5.4609
+ 500 val perplexity 235.3170
+ 500 train 5.220690 (lr=4.2042e-04) (hash(x)=71851938)
+ 510 train 5.312815 (lr=4.2881e-04) (hash(x)=77159346)
+ 520 train 5.363231 (lr=4.3720e-04) (hash(x)=80755753)
+ 530 train 5.466526 (lr=4.4559e-04) (hash(x)=75379679)
+ 540 train 5.377451 (lr=4.5399e-04) (hash(x)=82458619)
+ 550 train 5.360423 (lr=4.6238e-04) (hash(x)=86366396)
+ 560 train 5.338173 (lr=4.7077e-04) (hash(x)=85095044)
+ 570 train 5.311355 (lr=4.7916e-04) (hash(x)=78385159)
+ 580 train 5.269881 (lr=4.8755e-04) (hash(x)=79342394)
+ 590 train 5.264031 (lr=4.9594e-04) (hash(x)=70782192)
+ 600 val loss 5.2656
+ 600 val perplexity 193.5721
+ 600 train 5.237235 (lr=5.0434e-04) (hash(x)=76130353)
+ 610 train 5.148084 (lr=5.1273e-04) (hash(x)=74778440)
+ 620 train 5.106846 (lr=5.2112e-04) (hash(x)=79129709)
+ 630 train 5.008933 (lr=5.2951e-04) (hash(x)=76469962)
+ 640 train 5.021534 (lr=5.3790e-04) (hash(x)=78288049)
+ 650 train 4.977587 (lr=5.4629e-04) (hash(x)=76641388)
+ 660 train 4.879727 (lr=5.5469e-04) (hash(x)=75937906)
+ 670 train 4.907400 (lr=5.6308e-04) (hash(x)=74807157)
+ 680 train 4.787249 (lr=5.7147e-04) (hash(x)=77490144)
+ 690 train 4.818188 (lr=5.7986e-04) (hash(x)=72900124)
+ 700 val loss 5.0805
+ 700 val perplexity 160.8561
+ 700 train 4.789006 (lr=5.8825e-04) (hash(x)=73424218)
+ 710 train 5.061962 (lr=5.9664e-04) (hash(x)=86052345)
+ 720 train 5.011309 (lr=6.0000e-04) (hash(x)=77462613)
+ 730 train 5.016435 (lr=5.9998e-04) (hash(x)=79027471)
+ 740 train 4.978360 (lr=5.9994e-04) (hash(x)=78149992)
+ 750 train 4.939659 (lr=5.9988e-04) (hash(x)=89147499)
+ 760 train 4.939705 (lr=5.9980e-04) (hash(x)=74931538)
+ 770 train 4.890773 (lr=5.9970e-04) (hash(x)=89648204)
+ 780 train 4.937779 (lr=5.9958e-04) (hash(x)=83708293)
+ 790 train 4.927411 (lr=5.9944e-04) (hash(x)=84336512)
+ 800 val loss 4.8783
+ 800 val perplexity 131.4092
+ 800 train 4.806079 (lr=5.9928e-04) (hash(x)=75025285)
+ 810 train 4.709286 (lr=5.9910e-04) (hash(x)=79436536)
+ 820 train 4.702584 (lr=5.9890e-04) (hash(x)=90467390)
+ 830 train 4.746223 (lr=5.9869e-04) (hash(x)=77292020)
+ 840 train 4.651864 (lr=5.9845e-04) (hash(x)=75568927)
+ 850 train 4.673661 (lr=5.9819e-04) (hash(x)=79671781)
+ 860 train 4.747439 (lr=5.9791e-04) (hash(x)=84280943)
+ 870 train 4.728676 (lr=5.9761e-04) (hash(x)=80901294)
+ 880 train 4.839090 (lr=5.9730e-04) (hash(x)=81437584)
+ 890 train 4.692418 (lr=5.9696e-04) (hash(x)=73893778)
+ 900 val loss 4.7051
+ 900 val perplexity 110.5109
+ 900 train 4.626082 (lr=5.9660e-04) (hash(x)=74987794)
+ 910 train 4.644361 (lr=5.9623e-04) (hash(x)=93721374)
+ 920 train 4.627056 (lr=5.9583e-04) (hash(x)=79149678)
+ 930 train 4.569266 (lr=5.9542e-04) (hash(x)=83179387)
+ 940 train 4.567439 (lr=5.9498e-04) (hash(x)=89009978)
+ 950 train 4.540099 (lr=5.9453e-04) (hash(x)=76483735)
+ 960 train 4.472516 (lr=5.9405e-04) (hash(x)=78638850)
+ 970 train 4.505652 (lr=5.9356e-04) (hash(x)=90415206)
+ 980 train 4.432364 (lr=5.9305e-04) (hash(x)=90819195)
+ 990 train 4.557418 (lr=5.9251e-04) (hash(x)=85938852)
+ 1000 val loss 4.5439
+ 1000 val perplexity 94.0544
+ 1000 train 4.593643 (lr=5.9196e-04) (hash(x)=82436789)
+ 1010 train 4.631203 (lr=5.9139e-04) (hash(x)=63022149)
+ 1020 train 4.533416 (lr=5.9080e-04) (hash(x)=83501199)
+ 1030 train 4.526703 (lr=5.9019e-04) (hash(x)=77093326)
+ 1040 train 4.390250 (lr=5.8956e-04) (hash(x)=73437559)
+ 1050 train 4.482548 (lr=5.8891e-04) (hash(x)=84550388)
+ 1060 train 4.478627 (lr=5.8825e-04) (hash(x)=86184566)
+ 1070 train 4.463887 (lr=5.8756e-04) (hash(x)=85644922)
+ 1080 train 4.398623 (lr=5.8686e-04) (hash(x)=86179801)
+ 1090 train 4.365890 (lr=5.8613e-04) (hash(x)=82868303)
+ 1100 val loss 4.4390
+ 1100 val perplexity 84.6924
+ 1100 train 4.466256 (lr=5.8539e-04) (hash(x)=96780388)
+ 1110 train 4.403344 (lr=5.8463e-04) (hash(x)=87223122)
+ 1120 train 4.362931 (lr=5.8385e-04) (hash(x)=77292786)
+ 1130 train 4.385690 (lr=5.8305e-04) (hash(x)=88761375)
+ 1140 train 4.295548 (lr=5.8223e-04) (hash(x)=80956468)
+ 1150 train 4.462841 (lr=5.8140e-04) (hash(x)=84725462)
+ 1160 train 4.443383 (lr=5.8054e-04) (hash(x)=76098113)
+ 1170 train 4.420262 (lr=5.7967e-04) (hash(x)=84387685)
+ 1180 train 4.357417 (lr=5.7878e-04) (hash(x)=76120321)
+ 1190 train 4.323272 (lr=5.7787e-04) (hash(x)=85974065)
+ 1200 val loss 4.3421
+ 1200 val perplexity 76.8714
+ 1200 train 4.224998 (lr=5.7694e-04) (hash(x)=79340644)
+ 1210 train 4.319157 (lr=5.7599e-04) (hash(x)=88345025)
+ 1220 train 4.219506 (lr=5.7503e-04) (hash(x)=86406230)
+ 1230 train 4.205584 (lr=5.7405e-04) (hash(x)=80608975)
+ 1240 train 4.262941 (lr=5.7305e-04) (hash(x)=81635225)
+ 1250 train 4.096692 (lr=5.7203e-04) (hash(x)=82126376)
+ 1260 train 4.269254 (lr=5.7099e-04) (hash(x)=83737972)
+ 1270 train 4.178236 (lr=5.6994e-04) (hash(x)=81132345)
+ 1280 train 4.262471 (lr=5.6887e-04) (hash(x)=85850404)
+ 1290 train 4.267895 (lr=5.6778e-04) (hash(x)=83490640)
+ 1300 val loss 4.2785
+ 1300 val perplexity 72.1317
+ 1300 train 4.247018 (lr=5.6667e-04) (hash(x)=77545187)
+ 1310 train 4.292161 (lr=5.6555e-04) (hash(x)=86412685)
+ 1320 train 4.185549 (lr=5.6440e-04) (hash(x)=86429640)
+ 1330 train 4.317113 (lr=5.6325e-04) (hash(x)=79645281)
+ 1340 train 4.271056 (lr=5.6207e-04) (hash(x)=82668541)
+ 1350 train 4.140747 (lr=5.6088e-04) (hash(x)=73627248)
+ 1360 train 4.237195 (lr=5.5967e-04) (hash(x)=74845133)
+ 1370 train 4.145235 (lr=5.5844e-04) (hash(x)=77850497)
+ 1380 train 4.166664 (lr=5.5720e-04) (hash(x)=76143954)
+ 1390 train 4.177283 (lr=5.5593e-04) (hash(x)=80675544)
+ 1400 val loss 4.2317
+ 1400 val perplexity 68.8322
+ 1400 train 4.144437 (lr=5.5466e-04) (hash(x)=76010938)
+ 1410 train 4.182112 (lr=5.5336e-04) (hash(x)=81260300)
+ 1420 train 4.298414 (lr=5.5205e-04) (hash(x)=83803629)
+ 1430 train 4.126324 (lr=5.5073e-04) (hash(x)=83840110)
+ 1440 train 4.318179 (lr=5.4938e-04) (hash(x)=97711831)
+ 1450 train 4.192608 (lr=5.4803e-04) (hash(x)=81110139)
+ 1460 train 4.119545 (lr=5.4665e-04) (hash(x)=90942839)
+ 1470 train 4.156369 (lr=5.4526e-04) (hash(x)=80721564)
+ 1480 train 4.248527 (lr=5.4385e-04) (hash(x)=77852059)
+ 1490 train 4.143416 (lr=5.4243e-04) (hash(x)=80011365)
+ 1500 val loss 4.1795
+ 1500 val perplexity 65.3341
+ 1500 train 4.172869 (lr=5.4099e-04) (hash(x)=84496142)
+ 1510 train 4.105510 (lr=5.3954e-04) (hash(x)=70186729)
+ 1520 train 4.127757 (lr=5.3807e-04) (hash(x)=74854227)
+ 1530 train 4.034189 (lr=5.3658e-04) (hash(x)=77468161)
+ 1540 train 4.044551 (lr=5.3508e-04) (hash(x)=87853059)
+ 1550 train 3.984467 (lr=5.3357e-04) (hash(x)=71225436)
+ 1560 train 4.080449 (lr=5.3204e-04) (hash(x)=84072783)
+ 1570 train 4.125065 (lr=5.3049e-04) (hash(x)=72723098)
+ 1580 train 4.152003 (lr=5.2893e-04) (hash(x)=90409866)
+ 1590 train 4.218709 (lr=5.2736e-04) (hash(x)=87481378)
+ 1600 val loss 4.1386
+ 1600 val perplexity 62.7133
+ 1600 train 4.203616 (lr=5.2577e-04) (hash(x)=77643862)
+ 1610 train 4.081103 (lr=5.2417e-04) (hash(x)=88862575)
+ 1620 train 4.097677 (lr=5.2255e-04) (hash(x)=84612581)
+ 1630 train 4.139013 (lr=5.2092e-04) (hash(x)=87075989)
+ 1640 train 4.018781 (lr=5.1927e-04) (hash(x)=88277361)
+ 1650 train 4.136680 (lr=5.1761e-04) (hash(x)=78750236)
+ 1660 train 4.050566 (lr=5.1594e-04) (hash(x)=82604581)
+ 1670 train 3.894072 (lr=5.1425e-04) (hash(x)=68482265)
+ 1680 train 4.017204 (lr=5.1255e-04) (hash(x)=75088835)
+ 1690 train 3.964872 (lr=5.1084e-04) (hash(x)=66060989)
+ 1700 val loss 4.1213
+ 1700 val perplexity 61.6371
+ 1700 train 4.119876 (lr=5.0911e-04) (hash(x)=79986754)
+ 1710 train 4.094191 (lr=5.0737e-04) (hash(x)=83657930)
+ 1720 train 4.035969 (lr=5.0562e-04) (hash(x)=81754135)
+ 1730 train 4.135921 (lr=5.0385e-04) (hash(x)=78904427)
+ 1740 train 4.030789 (lr=5.0207e-04) (hash(x)=85920177)
+ 1750 train 4.089312 (lr=5.0028e-04) (hash(x)=86573211)
+ 1760 train 4.044215 (lr=4.9847e-04) (hash(x)=81737128)
+ 1770 train 4.033719 (lr=4.9665e-04) (hash(x)=77400968)
+ 1780 train 4.044498 (lr=4.9482e-04) (hash(x)=73545497)
+ 1790 train 4.305316 (lr=4.9298e-04) (hash(x)=71641943)
+ 1800 val loss 4.0700
+ 1800 val perplexity 58.5541
+ 1800 train 4.141685 (lr=4.9113e-04) (hash(x)=87819781)
+ 1810 train 3.998232 (lr=4.8926e-04) (hash(x)=86870770)
+ 1820 train 4.010203 (lr=4.8739e-04) (hash(x)=82522211)
+ 1830 train 4.099488 (lr=4.8550e-04) (hash(x)=61947437)
+ 1840 train 3.967906 (lr=4.8360e-04) (hash(x)=79865406)
+ 1850 train 3.930274 (lr=4.8169e-04) (hash(x)=79828721)
+ 1860 train 4.070061 (lr=4.7976e-04) (hash(x)=80869571)
+ 1870 train 4.061483 (lr=4.7783e-04) (hash(x)=73780971)
+ 1880 train 4.017057 (lr=4.7588e-04) (hash(x)=79249549)
+ 1890 train 4.075583 (lr=4.7393e-04) (hash(x)=81041904)
+ 1900 val loss 4.0442
+ 1900 val perplexity 57.0645
+ 1900 train 4.147625 (lr=4.7196e-04) (hash(x)=82456430)
+ 1910 train 3.900007 (lr=4.6999e-04) (hash(x)=82222135)
+ 1920 train 3.947019 (lr=4.6800e-04) (hash(x)=70033249)
+ 1930 train 3.933846 (lr=4.6600e-04) (hash(x)=72887360)
+ 1940 train 3.982329 (lr=4.6400e-04) (hash(x)=83251100)
+ 1950 train 4.018911 (lr=4.6198e-04) (hash(x)=79660266)
+ 1960 train 3.902264 (lr=4.5995e-04) (hash(x)=78919068)
+ 1970 train 4.013284 (lr=4.5792e-04) (hash(x)=94690431)
+ 1980 train 3.827605 (lr=4.5587e-04) (hash(x)=88153756)
+ 1990 train 3.887743 (lr=4.5381e-04) (hash(x)=77195688)
+ 2000 val loss 4.0142
+ 2000 val perplexity 55.3801
+ 2000 train 3.970705 (lr=4.5175e-04) (hash(x)=81308591)
+ 2010 train 4.027360 (lr=4.4968e-04) (hash(x)=80205479)
+ 2020 train 3.944631 (lr=4.4760e-04) (hash(x)=81008704)
+ 2030 train 4.008580 (lr=4.4551e-04) (hash(x)=82914358)
+ 2040 train 4.018912 (lr=4.4341e-04) (hash(x)=88064399)
+ 2050 train 4.030991 (lr=4.4130e-04) (hash(x)=82889390)
+ 2060 train 4.097082 (lr=4.3918e-04) (hash(x)=80913554)
+ 2070 train 3.930981 (lr=4.3706e-04) (hash(x)=78121791)
+ 2080 train 4.068925 (lr=4.3493e-04) (hash(x)=75356657)
+ 2090 train 3.898297 (lr=4.3279e-04) (hash(x)=74048412)
+ 2100 val loss 3.9821
+ 2100 val perplexity 53.6320
+ 2100 train 3.815279 (lr=4.3064e-04) (hash(x)=68928225)
+ 2110 train 3.870517 (lr=4.2849e-04) (hash(x)=93734745)
+ 2120 train 3.870013 (lr=4.2633e-04) (hash(x)=72878164)
+ 2130 train 3.996075 (lr=4.2416e-04) (hash(x)=71508226)
+ 2140 train 3.948195 (lr=4.2198e-04) (hash(x)=84168671)
+ 2150 train 3.876794 (lr=4.1980e-04) (hash(x)=74673639)
+ 2160 train 4.026609 (lr=4.1761e-04) (hash(x)=75470031)
+ 2170 train 3.988094 (lr=4.1542e-04) (hash(x)=74307890)
+ 2180 train 4.108595 (lr=4.1321e-04) (hash(x)=77214245)
+ 2190 train 3.883413 (lr=4.1101e-04) (hash(x)=88628359)
+ 2200 val loss 3.9608
+ 2200 val perplexity 52.4977
+ 2200 train 3.908175 (lr=4.0879e-04) (hash(x)=74779126)
+ 2210 train 3.948145 (lr=4.0657e-04) (hash(x)=79817976)
+ 2220 train 3.891584 (lr=4.0435e-04) (hash(x)=75258996)
+ 2230 train 3.873952 (lr=4.0212e-04) (hash(x)=88164047)
+ 2240 train 3.850070 (lr=3.9988e-04) (hash(x)=78580686)
+ 2250 train 3.780027 (lr=3.9764e-04) (hash(x)=76510617)
+ 2260 train 3.862032 (lr=3.9539e-04) (hash(x)=76043966)
+ 2270 train 3.841270 (lr=3.9314e-04) (hash(x)=73062098)
+ 2280 train 4.063267 (lr=3.9089e-04) (hash(x)=81885909)
+ 2290 train 4.000932 (lr=3.8863e-04) (hash(x)=81481741)
+ 2300 val loss 3.9344
+ 2300 val perplexity 51.1319
+ 2300 train 3.856041 (lr=3.8636e-04) (hash(x)=82104275)
+ 2310 train 3.901805 (lr=3.8409e-04) (hash(x)=82137309)
+ 2320 train 3.881460 (lr=3.8182e-04) (hash(x)=86741167)
+ 2330 train 3.893969 (lr=3.7955e-04) (hash(x)=84796263)
+ 2340 train 3.890165 (lr=3.7727e-04) (hash(x)=169966529)
+ 2350 train 3.756200 (lr=3.7498e-04) (hash(x)=80006061)
+ 2360 train 3.793148 (lr=3.7270e-04) (hash(x)=71090999)
+ 2370 train 3.925208 (lr=3.7041e-04) (hash(x)=86641850)
+ 2380 train 3.824032 (lr=3.6811e-04) (hash(x)=84573253)
+ 2390 train 3.866942 (lr=3.6582e-04) (hash(x)=74483764)
+ 2400 val loss 3.9214
+ 2400 val perplexity 50.4720
+ 2400 train 3.840872 (lr=3.6352e-04) (hash(x)=78327659)
+ 2410 train 3.903591 (lr=3.6122e-04) (hash(x)=77273627)
+ 2420 train 3.769855 (lr=3.5891e-04) (hash(x)=76938049)
+ 2430 train 3.768020 (lr=3.5661e-04) (hash(x)=98449442)
+ 2440 train 3.634655 (lr=3.5430e-04) (hash(x)=78157797)
+ 2450 train 3.667456 (lr=3.5199e-04) (hash(x)=80637582)
+ 2460 train 3.648945 (lr=3.4968e-04) (hash(x)=80023854)
+ 2470 train 3.722162 (lr=3.4737e-04) (hash(x)=86678884)
+ 2480 train 3.937719 (lr=3.4506e-04) (hash(x)=84086469)
+ 2490 train 3.944941 (lr=3.4274e-04) (hash(x)=83131141)
+ 2500 val loss 3.8947
+ 2500 val perplexity 49.1391
+ 2500 train 3.866644 (lr=3.4043e-04) (hash(x)=82583497)
+ 2510 train 3.936357 (lr=3.3811e-04) (hash(x)=79164326)
+ 2520 train 3.844373 (lr=3.3579e-04) (hash(x)=76456503)
+ 2530 train 3.904964 (lr=3.3348e-04) (hash(x)=82001228)
+ 2540 train 3.934854 (lr=3.3116e-04) (hash(x)=75615595)
+ 2550 train 3.886881 (lr=3.2884e-04) (hash(x)=78397869)
+ 2560 train 3.892297 (lr=3.2652e-04) (hash(x)=75547032)
+ 2570 train 3.796817 (lr=3.2421e-04) (hash(x)=89201025)
+ 2580 train 3.783798 (lr=3.2189e-04) (hash(x)=89856704)
+ 2590 train 3.802163 (lr=3.1957e-04) (hash(x)=82175682)
+ 2600 val loss 3.8741
+ 2600 val perplexity 48.1387
+ 2600 train 3.816540 (lr=3.1726e-04) (hash(x)=78312826)
+ 2610 train 3.791480 (lr=3.1494e-04) (hash(x)=77066588)
+ 2620 train 3.626380 (lr=3.1263e-04) (hash(x)=78666061)
+ 2630 train 3.630448 (lr=3.1032e-04) (hash(x)=93762143)
+ 2640 train 3.660300 (lr=3.0801e-04) (hash(x)=83191587)
+ 2650 train 3.567744 (lr=3.0570e-04) (hash(x)=87169585)
+ 2660 train 3.774030 (lr=3.0339e-04) (hash(x)=86426388)
+ 2670 train 3.903484 (lr=3.0109e-04) (hash(x)=76692638)
+ 2680 train 3.824401 (lr=2.9878e-04) (hash(x)=77446063)
+ 2690 train 3.832089 (lr=2.9648e-04) (hash(x)=79809050)
+ 2700 val loss 3.8660
+ 2700 val perplexity 47.7526
+ 2700 train 3.756493 (lr=2.9418e-04) (hash(x)=83116823)
+ 2710 train 3.873871 (lr=2.9189e-04) (hash(x)=75622148)
+ 2720 train 3.804423 (lr=2.8959e-04) (hash(x)=80690512)
+ 2730 train 3.880655 (lr=2.8730e-04) (hash(x)=78009984)
+ 2740 train 3.828596 (lr=2.8502e-04) (hash(x)=74205488)
+ 2750 train 3.837606 (lr=2.8273e-04) (hash(x)=91013332)
+ 2760 train 3.753824 (lr=2.8045e-04) (hash(x)=76954961)
+ 2770 train 3.763358 (lr=2.7818e-04) (hash(x)=79390317)
+ 2780 train 3.636682 (lr=2.7591e-04) (hash(x)=70168783)
+ 2790 train 3.815640 (lr=2.7364e-04) (hash(x)=76028417)
+ 2800 val loss 3.8460
+ 2800 val perplexity 46.8056
+ 2800 train 3.636314 (lr=2.7137e-04) (hash(x)=77656050)
+ 2810 train 3.669113 (lr=2.6911e-04) (hash(x)=94248216)
+ 2820 train 3.734297 (lr=2.6686e-04) (hash(x)=78305078)
+ 2830 train 3.641318 (lr=2.6461e-04) (hash(x)=79948848)
+ 2840 train 3.502400 (lr=2.6236e-04) (hash(x)=85341024)
+ 2850 train 3.807054 (lr=2.6012e-04) (hash(x)=78735170)
+ 2860 train 4.066610 (lr=2.5788e-04) (hash(x)=71616419)
+ 2870 train 3.818464 (lr=2.5565e-04) (hash(x)=78656517)
+ 2880 train 3.846994 (lr=2.5343e-04) (hash(x)=80073987)
+ 2890 train 3.778906 (lr=2.5121e-04) (hash(x)=76894809)
+ 2900 val loss 3.8191
+ 2900 val perplexity 45.5642
+ 2900 train 3.758083 (lr=2.4899e-04) (hash(x)=80499838)
+ 2910 train 3.813251 (lr=2.4679e-04) (hash(x)=72673354)
+ 2920 train 3.797680 (lr=2.4458e-04) (hash(x)=84265768)
+ 2930 train 3.818400 (lr=2.4239e-04) (hash(x)=79612060)
+ 2940 train 3.672692 (lr=2.4020e-04) (hash(x)=74970087)
+ 2950 train 3.834738 (lr=2.3802e-04) (hash(x)=84166818)
+ 2960 train 3.818449 (lr=2.3584e-04) (hash(x)=89410221)
+ 2970 train 3.755245 (lr=2.3367e-04) (hash(x)=75672566)
+ 2980 train 3.614504 (lr=2.3151e-04) (hash(x)=81760314)
+ 2990 train 3.583991 (lr=2.2936e-04) (hash(x)=80605200)
+ 3000 val loss 3.8153
+ 3000 val perplexity 45.3921
+ 3000 train 3.574738 (lr=2.2721e-04) (hash(x)=83804735)
+ 3010 train 3.510992 (lr=2.2507e-04) (hash(x)=77015303)
+ 3020 train 3.582888 (lr=2.2294e-04) (hash(x)=81464523)
+ 3030 train 3.754169 (lr=2.2082e-04) (hash(x)=80168230)
+ 3040 train 3.703672 (lr=2.1870e-04) (hash(x)=84817006)
+ 3050 train 3.823137 (lr=2.1659e-04) (hash(x)=71601811)
+ 3060 train 3.747623 (lr=2.1449e-04) (hash(x)=85499733)
+ 3070 train 3.812778 (lr=2.1240e-04) (hash(x)=78670408)
+ 3080 train 3.700468 (lr=2.1032e-04) (hash(x)=77120468)
+ 3090 train 3.802423 (lr=2.0825e-04) (hash(x)=77927426)
+ 3100 val loss 3.7943
+ 3100 val perplexity 44.4482
+ 3100 train 3.821735 (lr=2.0619e-04) (hash(x)=83998606)
+ 3110 train 3.730011 (lr=2.0413e-04) (hash(x)=77990218)
+ 3120 train 3.635248 (lr=2.0208e-04) (hash(x)=81623970)
+ 3130 train 3.609702 (lr=2.0005e-04) (hash(x)=75014781)
+ 3140 train 3.663582 (lr=1.9802e-04) (hash(x)=72591250)
+ 3150 train 3.681089 (lr=1.9600e-04) (hash(x)=81421847)
+ 3160 train 3.718182 (lr=1.9400e-04) (hash(x)=75112631)
+ 3170 train 3.805322 (lr=1.9200e-04) (hash(x)=87518033)
+ 3180 train 3.678021 (lr=1.9001e-04) (hash(x)=78648348)
+ 3190 train 3.778646 (lr=1.8804e-04) (hash(x)=78270029)
+ 3200 val loss 3.7759
+ 3200 val perplexity 43.6360
+ 3200 train 3.839443 (lr=1.8607e-04) (hash(x)=83380714)
+ 3210 train 4.036620 (lr=1.8412e-04) (hash(x)=67804991)
+ 3220 train 3.760511 (lr=1.8217e-04) (hash(x)=76968804)
+ 3230 train 3.694715 (lr=1.8024e-04) (hash(x)=75798670)
+ 3240 train 3.773014 (lr=1.7831e-04) (hash(x)=83192811)
+ 3250 train 3.702786 (lr=1.7640e-04) (hash(x)=81659789)
+ 3260 train 3.760989 (lr=1.7450e-04) (hash(x)=80622502)
+ 3270 train 3.720280 (lr=1.7261e-04) (hash(x)=85436511)
+ 3280 train 3.666389 (lr=1.7074e-04) (hash(x)=80589180)
+ 3290 train 3.809917 (lr=1.6887e-04) (hash(x)=85747193)
+ 3300 val loss 3.7718
+ 3300 val perplexity 43.4601
+ 3300 train 3.673318 (lr=1.6702e-04) (hash(x)=74635692)
+ 3310 train 3.657753 (lr=1.6518e-04) (hash(x)=82045455)
+ 3320 train 3.529346 (lr=1.6335e-04) (hash(x)=73249173)
+ 3330 train 3.496098 (lr=1.6153e-04) (hash(x)=71492338)
+ 3340 train 3.820335 (lr=1.5972e-04) (hash(x)=76374071)
+ 3350 train 3.787876 (lr=1.5793e-04) (hash(x)=80474064)
+ 3360 train 3.692667 (lr=1.5615e-04) (hash(x)=84390892)
+ 3370 train 3.741816 (lr=1.5438e-04) (hash(x)=83399949)
+ 3380 train 3.797963 (lr=1.5263e-04) (hash(x)=77648059)
+ 3390 train 3.770819 (lr=1.5089e-04) (hash(x)=80152701)
+ 3400 val loss 3.7511
+ 3400 val perplexity 42.5687
+ 3400 train 3.819565 (lr=1.4916e-04) (hash(x)=81952545)
+ 3410 train 3.776698 (lr=1.4745e-04) (hash(x)=80908993)
+ 3420 train 3.828337 (lr=1.4575e-04) (hash(x)=79610037)
+ 3430 train 3.694364 (lr=1.4406e-04) (hash(x)=87624382)
+ 3440 train 3.753950 (lr=1.4239e-04) (hash(x)=82336381)
+ 3450 train 3.728707 (lr=1.4073e-04) (hash(x)=87687835)
+ 3460 train 3.725184 (lr=1.3908e-04) (hash(x)=76376135)
+ 3470 train 3.648288 (lr=1.3745e-04) (hash(x)=79331391)
+ 3480 train 3.550665 (lr=1.3583e-04) (hash(x)=107002681)
+ 3490 train 3.633443 (lr=1.3423e-04) (hash(x)=77465514)
+ 3500 val loss 3.7473
+ 3500 val perplexity 42.4046
+ 3500 train 3.587634 (lr=1.3264e-04) (hash(x)=88237229)
+ 3510 train 3.653171 (lr=1.3107e-04) (hash(x)=81011739)
+ 3520 train 3.721961 (lr=1.2951e-04) (hash(x)=64643427)
+ 3530 train 3.986450 (lr=1.2796e-04) (hash(x)=78029539)
+ 3540 train 3.812087 (lr=1.2643e-04) (hash(x)=83188968)
+ 3550 train 3.824206 (lr=1.2492e-04) (hash(x)=86104185)
+ 3560 train 3.750349 (lr=1.2342e-04) (hash(x)=81993629)
+ 3570 train 3.731870 (lr=1.2193e-04) (hash(x)=72719368)
+ 3580 train 3.730203 (lr=1.2046e-04) (hash(x)=90091487)
+ 3590 train 3.624060 (lr=1.1901e-04) (hash(x)=77393152)
+ 3600 val loss 3.7307
+ 3600 val perplexity 41.7097
+ 3600 train 3.743039 (lr=1.1757e-04) (hash(x)=73103504)
+ 3610 train 3.743801 (lr=1.1615e-04) (hash(x)=84429400)
+ 3620 train 3.716208 (lr=1.1474e-04) (hash(x)=77021795)
+ 3630 train 3.673646 (lr=1.1335e-04) (hash(x)=92830605)
+ 3640 train 3.877163 (lr=1.1197e-04) (hash(x)=78313175)
+ 3650 train 3.679147 (lr=1.1062e-04) (hash(x)=85395549)
+ 3660 train 3.466781 (lr=1.0927e-04) (hash(x)=78114459)
+ 3670 train 3.592271 (lr=1.0795e-04) (hash(x)=74968316)
+ 3680 train 3.520341 (lr=1.0664e-04) (hash(x)=73358737)
+ 3690 train 3.606351 (lr=1.0534e-04) (hash(x)=76399442)
+ 3700 val loss 3.7300
+ 3700 val perplexity 41.6774
+ 3700 train 3.623710 (lr=1.0407e-04) (hash(x)=74039273)
+ 3710 train 3.787826 (lr=1.0280e-04) (hash(x)=79299680)
+ 3720 train 3.695810 (lr=1.0156e-04) (hash(x)=94701498)
+ 3730 train 3.784164 (lr=1.0033e-04) (hash(x)=75352071)
+ 3740 train 3.664125 (lr=9.9124e-05) (hash(x)=77909487)
+ 3750 train 3.785180 (lr=9.7931e-05) (hash(x)=73986730)
+ 3760 train 3.729086 (lr=9.6755e-05) (hash(x)=79325763)
+ 3770 train 3.784531 (lr=9.5596e-05) (hash(x)=72457818)
+ 3780 train 3.905213 (lr=9.4454e-05) (hash(x)=71775590)
+ 3790 train 3.799231 (lr=9.3330e-05) (hash(x)=82638943)
+ 3800 val loss 3.7149
+ 3800 val perplexity 41.0546
+ 3800 train 3.777917 (lr=9.2224e-05) (hash(x)=79965893)
+ 3810 train 3.710453 (lr=9.1134e-05) (hash(x)=72598235)
+ 3820 train 3.692875 (lr=9.0063e-05) (hash(x)=83113889)
+ 3830 train 3.744063 (lr=8.9009e-05) (hash(x)=74434590)
+ 3840 train 3.669098 (lr=8.7973e-05) (hash(x)=82860348)
+ 3850 train 3.611331 (lr=8.6954e-05) (hash(x)=78067565)
+ 3860 train 3.664964 (lr=8.5954e-05) (hash(x)=82592498)
+ 3870 train 3.603543 (lr=8.4971e-05) (hash(x)=81820733)
+ 3880 train 3.620313 (lr=8.4007e-05) (hash(x)=87709040)
+ 3890 train 3.652857 (lr=8.3061e-05) (hash(x)=70379093)
+ 3900 val loss 3.7094
+ 3900 val perplexity 40.8294
+ 3900 train 3.774960 (lr=8.2133e-05) (hash(x)=76597431)
+ 3910 train 3.660633 (lr=8.1223e-05) (hash(x)=90490716)
+ 3920 train 3.770563 (lr=8.0331e-05) (hash(x)=81970659)
+ 3930 train 3.752832 (lr=7.9458e-05) (hash(x)=81496334)
+ 3940 train 3.742235 (lr=7.8604e-05) (hash(x)=75717605)
+ 3950 train 3.679429 (lr=7.7768e-05) (hash(x)=82749357)
+ 3960 train 3.712869 (lr=7.6950e-05) (hash(x)=89355157)
+ 3970 train 3.665347 (lr=7.6151e-05) (hash(x)=78980403)
+ 3980 train 3.639618 (lr=7.5371e-05) (hash(x)=76627217)
+ 3990 train 3.642324 (lr=7.4610e-05) (hash(x)=72412879)
+ 4000 val loss 3.7042
+ 4000 val perplexity 40.6167
+ 4000 train 3.657221 (lr=7.3867e-05) (hash(x)=83018142)
+ 4010 train 3.682138 (lr=7.3143e-05) (hash(x)=81272436)
+ 4020 train 3.605317 (lr=7.2438e-05) (hash(x)=85497482)
+ 4030 train 3.661194 (lr=7.1753e-05) (hash(x)=85241734)
+ 4040 train 3.631224 (lr=7.1086e-05) (hash(x)=77925307)
+ 4050 train 3.632481 (lr=7.0438e-05) (hash(x)=84826179)
+ 4060 train 3.689243 (lr=6.9809e-05) (hash(x)=83606764)
+ 4070 train 3.604437 (lr=6.9200e-05) (hash(x)=80567590)
+ 4080 train 3.666875 (lr=6.8610e-05) (hash(x)=76860998)
+ 4090 train 3.706736 (lr=6.8039e-05) (hash(x)=74902328)
+ 4100 val loss 3.6941
+ 4100 val perplexity 40.2095
+ 4100 train 3.743418 (lr=6.7487e-05) (hash(x)=82832041)
+ 4110 train 3.621112 (lr=6.6955e-05) (hash(x)=79143262)
+ 4120 train 3.654513 (lr=6.6442e-05) (hash(x)=77038149)
+ 4130 train 3.825789 (lr=6.5948e-05) (hash(x)=86339074)
+ 4140 train 3.908206 (lr=6.5474e-05) (hash(x)=76686216)
+ 4150 train 3.662502 (lr=6.5020e-05) (hash(x)=70522682)
+ 4160 train 3.632463 (lr=6.4585e-05) (hash(x)=90958555)
+ 4170 train 3.642974 (lr=6.4169e-05) (hash(x)=91463532)
+ 4180 train 3.642266 (lr=6.3773e-05) (hash(x)=81959329)
+ 4190 train 3.605531 (lr=6.3397e-05) (hash(x)=83146752)
+ 4200 val loss 3.6929
+ 4200 val perplexity 40.1599
+ 4200 train 3.578906 (lr=6.3040e-05) (hash(x)=78361715)
+ 4210 train 3.730519 (lr=6.2703e-05) (hash(x)=87364889)
+ 4220 train 3.591410 (lr=6.2386e-05) (hash(x)=70465156)
+ 4230 train 3.747005 (lr=6.2089e-05) (hash(x)=84524081)
+ 4240 train 3.688734 (lr=6.1811e-05) (hash(x)=77824868)
+ 4250 train 3.710096 (lr=6.1553e-05) (hash(x)=81710711)
+ 4260 train 3.693176 (lr=6.1314e-05) (hash(x)=76362728)
+ 4270 train 3.691400 (lr=6.1096e-05) (hash(x)=83115208)
+ 4280 train 3.602577 (lr=6.0897e-05) (hash(x)=87218314)
+ 4290 train 3.762576 (lr=6.0718e-05) (hash(x)=74582673)
+ 4300 val loss 3.6892
+ 4300 val perplexity 40.0114
+ 4300 train 3.626175 (lr=6.0559e-05) (hash(x)=77379615)
+ 4310 train 3.614428 (lr=6.0420e-05) (hash(x)=78669579)
+ 4320 train 3.718278 (lr=6.0301e-05) (hash(x)=83066608)
+ 4330 train 3.692140 (lr=6.0201e-05) (hash(x)=83037340)
+ 4340 train 3.718928 (lr=6.0122e-05) (hash(x)=82849771)
+ 4350 train 3.665056 (lr=6.0062e-05) (hash(x)=76693985)
+ 4360 train 3.758386 (lr=6.0022e-05) (hash(x)=77745394)
+ 4370 train 3.716155 (lr=6.0002e-05) (hash(x)=79954388)
+ 4374 val loss 3.6838
+ 4374 val perplexity 39.7963
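Two consistency checks on the log above. First, the reported val perplexity is exp of the reported val loss (e.g. exp(10.9275) ≈ 55688.3 at step 0 and exp(3.6838) ≈ 39.80 at step 4374). Second, the lr column ramps up to max_lr = 6.0000e-04 around step 720 and decays to roughly 6.0e-05, one tenth of max_lr, by step 4370, consistent with a warmup-then-cosine schedule with a floor at 0.1 × max_lr (an inference from the numbers, not something stated in this commit). A minimal sketch of the first check:

```python
import math

# (val loss, val perplexity) pairs copied from the log; perplexities match
# exp(loss) to within the rounding of the 4-decimal printed losses.
for loss, ppl in [(10.9275, 55688.3086), (3.7042, 40.6167), (3.6838, 39.7963)]:
    assert abs(math.exp(loss) - ppl) / ppl < 1e-3
```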
12_head_baseline_lr_6e-4/model_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4bbce8146b1bbcc16be5c0c94bd792bd0b55d572bb94be3f91c51b6fada48fee
+ size 498607490
12_head_baseline_lr_6e-4/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a1665da605bfe90dc8713d8d7db0fdb6aa319f9def626c3891c70a7f09951b6
+ size 990934406
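To inspect or resume from this run, the three checkpoints can be loaded with torch.load once the LFS objects have been fetched. A minimal sketch, assuming the training script saved plain state objects (the actual keys inside these files are not shown in this commit, and the commented load_state_dict calls are hypothetical; args.json exposes resume_checkpoint / resume_optimizer flags for the training script itself):

```python
import torch

ckpt_dir = "12_head_baseline_lr_6e-4"

# Load on CPU first; move to the training device before use.
model_state = torch.load(f"{ckpt_dir}/model_04374.pt", map_location="cpu")
optim_state = torch.load(f"{ckpt_dir}/optimizer_04374.pt", map_location="cpu")
loader_state = torch.load(f"{ckpt_dir}/dataloader_04374.pt", map_location="cpu")

# Hypothetical: depends on how the training script structured the files.
# model.load_state_dict(model_state)
# optimizer.load_state_dict(optim_state)
```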