andrew-healey committed (verified)
Commit 495e389 · 1 Parent(s): 637c5dd

Upload folder using huggingface_hub

attention_kindself_n_heads2_seed1340/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindself_n_heads2_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "0.5e-4_30720_2_1340", "n_embd": 128}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindself_n_heads2_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 3e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3e-5_30720_2_1340", "n_embd": 128}
attention_kindself_n_heads2_seed1340/log2.txt CHANGED
@@ -1,303 +1,303 @@
  max_steps: 10000
  0 val loss 11.7850
  0 val perplexity 131263.8281
- 0 train 11.783751 (lr=2.5000e-07) (hash(x)=164406924)
- 100 val loss 10.2153
- 100 val perplexity 27317.5938
- 100 train 10.519998 (lr=2.5250e-05) (hash(x)=177407419)
- 200 val loss 9.2584
- 200 val perplexity 10491.9414
- 200 train 9.213327 (lr=5.0000e-05) (hash(x)=144903932)
- 300 val loss 8.3475
- 300 val perplexity 4219.7266
- 300 train 8.584803 (lr=4.9988e-05) (hash(x)=173839165)
- 400 val loss 7.8809
- 400 val perplexity 2646.2000
- 400 train 7.953244 (lr=4.9954e-05) (hash(x)=167734596)
- 500 val loss 7.6863
- 500 val perplexity 2178.3276
- 500 train 7.714057 (lr=4.9896e-05) (hash(x)=153224076)
- 600 val loss 7.6061
- 600 val perplexity 2010.5010
- 600 train 7.539889 (lr=4.9815e-05) (hash(x)=149619098)
- 700 val loss 7.5700
- 700 val perplexity 1939.2294
- 700 train 7.530424 (lr=4.9712e-05) (hash(x)=146539909)
- 800 val loss 7.5437
- 800 val perplexity 1888.8961
- 800 train 7.513658 (lr=4.9585e-05) (hash(x)=153710890)
- 900 val loss 7.5254
- 900 val perplexity 1854.6277
- 900 train 7.481957 (lr=4.9436e-05) (hash(x)=155873620)
- 1000 val loss 7.5028
- 1000 val perplexity 1813.1682
- 1000 train 7.427788 (lr=4.9264e-05) (hash(x)=145450636)
- 1100 val loss 7.4721
- 1100 val perplexity 1758.3578
- 1100 train 7.517784 (lr=4.9070e-05) (hash(x)=154123388)
- 1200 val loss 7.4442
- 1200 val perplexity 1709.9911
- 1200 train 7.313373 (lr=4.8854e-05) (hash(x)=145249251)
- 1300 val loss 7.4193
- 1300 val perplexity 1667.8983
- 1300 train 7.340686 (lr=4.8616e-05) (hash(x)=148937127)
- 1400 val loss 7.3984
- 1400 val perplexity 1633.3445
- 1400 train 7.506289 (lr=4.8356e-05) (hash(x)=150475545)
- 1500 val loss 7.3806
- 1500 val perplexity 1604.5392
- 1500 train 7.321553 (lr=4.8074e-05) (hash(x)=154653428)
- 1600 val loss 7.3591
- 1600 val perplexity 1570.3586
- 1600 train 7.332868 (lr=4.7772e-05) (hash(x)=144483776)
- 1700 val loss 7.3520
- 1700 val perplexity 1559.3756
- 1700 train 7.522246 (lr=4.7448e-05) (hash(x)=157395496)
- 1800 val loss 7.3280
- 1800 val perplexity 1522.4097
- 1800 train 7.344601 (lr=4.7105e-05) (hash(x)=157916369)
- 1900 val loss 7.3029
- 1900 val perplexity 1484.6582
- 1900 train 7.511676 (lr=4.6741e-05) (hash(x)=166073923)
- 2000 val loss 7.2946
- 2000 val perplexity 1472.3054
- 2000 train 7.389924 (lr=4.6357e-05) (hash(x)=154856891)
- 2100 val loss 7.2708
- 2100 val perplexity 1437.6372
- 2100 train 7.253731 (lr=4.5954e-05) (hash(x)=151925203)
- 2200 val loss 7.2635
- 2200 val perplexity 1427.1794
- 2200 train 7.057482 (lr=4.5532e-05) (hash(x)=136191502)
- 2300 val loss 7.2440
- 2300 val perplexity 1399.7255
- 2300 train 7.358443 (lr=4.5091e-05) (hash(x)=153273362)
- 2400 val loss 7.2415
- 2400 val perplexity 1396.1399
- 2400 train 7.184960 (lr=4.4633e-05) (hash(x)=148021541)
- 2500 val loss 7.2281
- 2500 val perplexity 1377.6011
- 2500 train 7.135568 (lr=4.4156e-05) (hash(x)=141356608)
- 2600 val loss 7.1998
- 2600 val perplexity 1339.2205
- 2600 train 7.167821 (lr=4.3663e-05) (hash(x)=146005217)
- 2700 val loss 7.2029
- 2700 val perplexity 1343.2925
- 2700 train 7.051125 (lr=4.3153e-05) (hash(x)=144511718)
- 2800 val loss 7.1891
- 2800 val perplexity 1324.8468
- 2800 train 7.080490 (lr=4.2627e-05) (hash(x)=146019502)
- 2900 val loss 7.1863
- 2900 val perplexity 1321.1443
- 2900 train 7.086877 (lr=4.2085e-05) (hash(x)=146496200)
- 3000 val loss 7.1647
- 3000 val perplexity 1292.9366
- 3000 train 7.103776 (lr=4.1529e-05) (hash(x)=150127281)
- 3100 val loss 7.1465
- 3100 val perplexity 1269.6981
- 3100 train 7.101170 (lr=4.0957e-05) (hash(x)=142022255)
- 3200 val loss 7.1417
- 3200 val perplexity 1263.6141
- 3200 train 7.215256 (lr=4.0373e-05) (hash(x)=154120875)
- 3300 val loss 7.1316
- 3300 val perplexity 1250.8468
- 3300 train 7.165504 (lr=3.9775e-05) (hash(x)=153999717)
- 3400 val loss 7.1289
- 3400 val perplexity 1247.4594
- 3400 train 6.944142 (lr=3.9164e-05) (hash(x)=139694097)
- 3500 val loss 7.1219
- 3500 val perplexity 1238.8192
- 3500 train 7.308336 (lr=3.8541e-05) (hash(x)=162992732)
- 3600 val loss 7.1138
- 3600 val perplexity 1228.8492
- 3600 train 7.102546 (lr=3.7907e-05) (hash(x)=147574101)
- 3700 val loss 7.0983
- 3700 val perplexity 1209.8594
- 3700 train 7.209728 (lr=3.7262e-05) (hash(x)=157763099)
- 3800 val loss 7.0881
- 3800 val perplexity 1197.6820
- 3800 train 7.257846 (lr=3.6608e-05) (hash(x)=170800034)
- 3900 val loss 7.0837
- 3900 val perplexity 1192.3428
- 3900 train 7.171274 (lr=3.5944e-05) (hash(x)=164984528)
- 4000 val loss 7.0732
- 4000 val perplexity 1179.8843
- 4000 train 6.981183 (lr=3.5271e-05) (hash(x)=141743323)
- 4100 val loss 7.0703
- 4100 val perplexity 1176.5262
- 4100 train 7.100652 (lr=3.4590e-05) (hash(x)=153392872)
- 4200 val loss 7.0737
- 4200 val perplexity 1180.4982
- 4200 train 6.974932 (lr=3.3902e-05) (hash(x)=149074933)
- 4300 val loss 7.0585
- 4300 val perplexity 1162.6697
- 4300 train 7.428771 (lr=3.3207e-05) (hash(x)=167823423)
- 4400 val loss 7.0469
- 4400 val perplexity 1149.3439
- 4400 train 6.795209 (lr=3.2507e-05) (hash(x)=141203114)
- 4500 val loss 7.0391
- 4500 val perplexity 1140.3953
- 4500 train 7.029208 (lr=3.1801e-05) (hash(x)=146284780)
- 4600 val loss 7.0294
- 4600 val perplexity 1129.3047
- 4600 train 6.851205 (lr=3.1091e-05) (hash(x)=141126464)
- 4700 val loss 7.0254
- 4700 val perplexity 1124.8435
- 4700 train 7.030688 (lr=3.0377e-05) (hash(x)=154751926)
- 4800 val loss 7.0234
- 4800 val perplexity 1122.6107
- 4800 train 7.117683 (lr=2.9661e-05) (hash(x)=154793198)
- 4900 val loss 7.0168
- 4900 val perplexity 1115.1973
- 4900 train 6.778317 (lr=2.8942e-05) (hash(x)=139406392)
- 5000 val loss 7.0337
- 5000 val perplexity 1134.2751
- 5000 train 6.844306 (lr=2.8221e-05) (hash(x)=153548741)
- 5100 val loss 7.0095
- 5100 val perplexity 1107.0522
- 5100 train 7.066870 (lr=2.7500e-05) (hash(x)=160488568)
- 5200 val loss 7.0205
- 5200 val perplexity 1119.3335
- 5200 train 7.026616 (lr=2.6779e-05) (hash(x)=149645053)
- 5300 val loss 6.9927
- 5300 val perplexity 1088.7074
- 5300 train 7.063434 (lr=2.6058e-05) (hash(x)=155820556)
- 5400 val loss 6.9874
- 5400 val perplexity 1082.9535
- 5400 train 6.922039 (lr=2.5339e-05) (hash(x)=147538134)
- 5500 val loss 6.9887
- 5500 val perplexity 1084.3253
- 5500 train 7.190579 (lr=2.4623e-05) (hash(x)=166889307)
- 5600 val loss 6.9805
- 5600 val perplexity 1075.4659
- 5600 train 6.727509 (lr=2.3909e-05) (hash(x)=139516699)
- 5700 val loss 6.9832
- 5700 val perplexity 1078.3936
- 5700 train 6.713973 (lr=2.3199e-05) (hash(x)=140453511)
- 5800 val loss 6.9777
- 5800 val perplexity 1072.4686
- 5800 train 6.954536 (lr=2.2493e-05) (hash(x)=162964847)
- 5900 val loss 6.9741
- 5900 val perplexity 1068.5493
- 5900 train 6.990734 (lr=2.1793e-05) (hash(x)=150606634)
- 6000 val loss 6.9755
- 6000 val perplexity 1070.1249
- 6000 train 7.088573 (lr=2.1098e-05) (hash(x)=149890857)
- 6100 val loss 6.9629
- 6100 val perplexity 1056.6700
- 6100 train 7.077487 (lr=2.0410e-05) (hash(x)=173884145)
- 6200 val loss 6.9650
- 6200 val perplexity 1058.8923
- 6200 train 6.981199 (lr=1.9729e-05) (hash(x)=151987098)
- 6300 val loss 6.9520
- 6300 val perplexity 1045.2423
- 6300 train 6.875057 (lr=1.9056e-05) (hash(x)=148853562)
- 6400 val loss 6.9520
- 6400 val perplexity 1045.2517
- 6400 train 6.736102 (lr=1.8392e-05) (hash(x)=141530101)
- 6500 val loss 6.9462
- 6500 val perplexity 1039.2115
- 6500 train 6.808595 (lr=1.7738e-05) (hash(x)=142297809)
- 6600 val loss 6.9353
- 6600 val perplexity 1027.9309
- 6600 train 6.806956 (lr=1.7093e-05) (hash(x)=142447782)
- 6700 val loss 6.9346
- 6700 val perplexity 1027.2244
- 6700 train 6.861821 (lr=1.6459e-05) (hash(x)=147004686)
- 6800 val loss 6.9320
- 6800 val perplexity 1024.5104
- 6800 train 6.689178 (lr=1.5836e-05) (hash(x)=133438702)
- 6900 val loss 6.9255
- 6900 val perplexity 1017.9020
- 6900 train 6.938148 (lr=1.5225e-05) (hash(x)=157085143)
- 7000 val loss 6.9227
- 7000 val perplexity 1015.0225
- 7000 train 6.838539 (lr=1.4627e-05) (hash(x)=139437666)
- 7100 val loss 6.9212
- 7100 val perplexity 1013.5634
- 7100 train 7.026873 (lr=1.4043e-05) (hash(x)=159792986)
- 7200 val loss 6.9126
- 7200 val perplexity 1004.8762
- 7200 train 6.871055 (lr=1.3471e-05) (hash(x)=144930687)
- 7300 val loss 6.9154
- 7300 val perplexity 1007.6875
- 7300 train 7.010877 (lr=1.2915e-05) (hash(x)=156242690)
- 7400 val loss 6.9077
- 7400 val perplexity 999.9543
- 7400 train 6.786288 (lr=1.2373e-05) (hash(x)=148183719)
- 7500 val loss 6.9037
- 7500 val perplexity 996.0017
- 7500 train 6.925545 (lr=1.1847e-05) (hash(x)=152494758)
- 7600 val loss 6.8995
- 7600 val perplexity 991.7601
- 7600 train 6.657605 (lr=1.1337e-05) (hash(x)=142485027)
- 7700 val loss 6.9031
- 7700 val perplexity 995.3195
- 7700 train 6.775089 (lr=1.0844e-05) (hash(x)=147512165)
- 7800 val loss 6.8954
- 7800 val perplexity 987.7213
- 7800 train 6.882179 (lr=1.0367e-05) (hash(x)=160346994)
- 7900 val loss 6.8917
- 7900 val perplexity 984.0620
- 7900 train 6.744603 (lr=9.9088e-06) (hash(x)=144488254)
- 8000 val loss 6.8868
- 8000 val perplexity 979.2140
- 8000 train 6.706420 (lr=9.4682e-06) (hash(x)=147637019)
- 8100 val loss 6.8843
- 8100 val perplexity 976.8035
- 8100 train 6.756386 (lr=9.0461e-06) (hash(x)=147340534)
- 8200 val loss 6.8822
- 8200 val perplexity 974.7460
- 8200 train 6.908176 (lr=8.6430e-06) (hash(x)=151630665)
- 8300 val loss 6.8787
- 8300 val perplexity 971.4084
- 8300 train 6.929987 (lr=8.2593e-06) (hash(x)=149747064)
- 8400 val loss 6.8742
- 8400 val perplexity 967.0470
- 8400 train 7.068721 (lr=7.8953e-06) (hash(x)=154245770)
- 8500 val loss 6.8727
- 8500 val perplexity 965.5495
- 8500 train 6.761356 (lr=7.5515e-06) (hash(x)=152559100)
- 8600 val loss 6.8694
- 8600 val perplexity 962.3356
- 8600 train 7.423410 (lr=7.2282e-06) (hash(x)=181365926)
- 8700 val loss 6.8702
- 8700 val perplexity 963.1868
- 8700 train 6.756258 (lr=6.9257e-06) (hash(x)=154405991)
- 8800 val loss 6.8678
- 8800 val perplexity 960.8546
- 8800 train 6.839130 (lr=6.6444e-06) (hash(x)=153755904)
- 8900 val loss 6.8658
- 8900 val perplexity 958.8810
- 8900 train 6.806930 (lr=6.3845e-06) (hash(x)=152120568)
- 9000 val loss 6.8628
- 9000 val perplexity 956.0494
- 9000 train 6.690845 (lr=6.1462e-06) (hash(x)=142797279)
- 9100 val loss 6.8586
- 9100 val perplexity 952.0648
- 9100 train 6.739343 (lr=5.9300e-06) (hash(x)=143037503)
- 9200 val loss 6.8569
- 9200 val perplexity 950.3707
- 9200 train 6.632078 (lr=5.7359e-06) (hash(x)=113690273)
- 9300 val loss 6.8565
- 9300 val perplexity 950.0820
- 9300 train 6.822642 (lr=5.5641e-06) (hash(x)=158025077)
- 9400 val loss 6.8566
- 9400 val perplexity 950.1423
- 9400 train 6.950947 (lr=5.4149e-06) (hash(x)=158251718)
- 9500 val loss 6.8543
- 9500 val perplexity 947.9561
- 9500 train 6.906723 (lr=5.2884e-06) (hash(x)=154752610)
- 9600 val loss 6.8483
- 9600 val perplexity 942.2529
- 9600 train 6.734294 (lr=5.1847e-06) (hash(x)=146889093)
- 9700 val loss 6.8470
- 9700 val perplexity 941.0433
- 9700 train 6.834735 (lr=5.1040e-06) (hash(x)=156906516)
- 9800 val loss 6.8473
- 9800 val perplexity 941.3678
- 9800 train 6.732006 (lr=5.0462e-06) (hash(x)=153841927)
- 9900 val loss 6.8446
- 9900 val perplexity 938.7849
- 9900 train 7.058944 (lr=5.0116e-06) (hash(x)=163514334)
- 9999 val loss 6.8432
- 9999 val perplexity 937.4626
+ 0 train 11.783751 (lr=1.5000e-07) (hash(x)=164406924)
+ 100 val loss 10.3274
+ 100 val perplexity 30559.0293
+ 100 train 10.625359 (lr=1.5150e-05) (hash(x)=177407419)
+ 200 val loss 9.8243
+ 200 val perplexity 18476.9238
+ 200 train 9.782228 (lr=3.0000e-05) (hash(x)=144903932)
+ 300 val loss 9.0817
+ 300 val perplexity 8793.1387
+ 300 train 9.250924 (lr=2.9993e-05) (hash(x)=173839165)
+ 400 val loss 8.6313
+ 400 val perplexity 5604.1348
+ 400 train 8.679514 (lr=2.9972e-05) (hash(x)=167734596)
+ 500 val loss 8.3174
+ 500 val perplexity 4094.3989
+ 500 train 8.330243 (lr=2.9938e-05) (hash(x)=153224076)
+ 600 val loss 8.0823
+ 600 val perplexity 3236.7158
+ 600 train 8.084147 (lr=2.9889e-05) (hash(x)=149619098)
+ 700 val loss 7.9125
+ 700 val perplexity 2731.2461
+ 700 train 7.883194 (lr=2.9827e-05) (hash(x)=146539909)
+ 800 val loss 7.7864
+ 800 val perplexity 2407.7214
+ 800 train 7.754084 (lr=2.9751e-05) (hash(x)=153710890)
+ 900 val loss 7.7050
+ 900 val perplexity 2219.3728
+ 900 train 7.678699 (lr=2.9662e-05) (hash(x)=155873620)
+ 1000 val loss 7.6431
+ 1000 val perplexity 2086.2842
+ 1000 train 7.582088 (lr=2.9558e-05) (hash(x)=145450636)
+ 1100 val loss 7.5988
+ 1100 val perplexity 1995.7732
+ 1100 train 7.636743 (lr=2.9442e-05) (hash(x)=154123388)
+ 1200 val loss 7.5662
+ 1200 val perplexity 1931.7372
+ 1200 train 7.433980 (lr=2.9312e-05) (hash(x)=145249251)
+ 1300 val loss 7.5439
+ 1300 val perplexity 1889.2493
+ 1300 train 7.472591 (lr=2.9169e-05) (hash(x)=148937127)
+ 1400 val loss 7.5294
+ 1400 val perplexity 1862.0782
+ 1400 train 7.627462 (lr=2.9013e-05) (hash(x)=150475545)
+ 1500 val loss 7.5084
+ 1500 val perplexity 1823.3295
+ 1500 train 7.451326 (lr=2.8845e-05) (hash(x)=154653428)
+ 1600 val loss 7.4894
+ 1600 val perplexity 1788.9817
+ 1600 train 7.470479 (lr=2.8663e-05) (hash(x)=144483776)
+ 1700 val loss 7.4684
+ 1700 val perplexity 1751.7314
+ 1700 train 7.634889 (lr=2.8469e-05) (hash(x)=157395496)
+ 1800 val loss 7.4524
+ 1800 val perplexity 1723.9410
+ 1800 train 7.474338 (lr=2.8263e-05) (hash(x)=157916369)
+ 1900 val loss 7.4368
+ 1900 val perplexity 1697.3846
+ 1900 train 7.635174 (lr=2.8044e-05) (hash(x)=166073923)
+ 2000 val loss 7.4216
+ 2000 val perplexity 1671.6597
+ 2000 train 7.528570 (lr=2.7814e-05) (hash(x)=154856891)
+ 2100 val loss 7.4115
+ 2100 val perplexity 1654.9597
+ 2100 train 7.391711 (lr=2.7572e-05) (hash(x)=151925203)
+ 2200 val loss 7.4000
+ 2200 val perplexity 1636.0275
+ 2200 train 7.224116 (lr=2.7319e-05) (hash(x)=136191502)
+ 2300 val loss 7.3818
+ 2300 val perplexity 1606.4937
+ 2300 train 7.472443 (lr=2.7055e-05) (hash(x)=153273362)
+ 2400 val loss 7.3818
+ 2400 val perplexity 1606.5304
+ 2400 train 7.312902 (lr=2.6780e-05) (hash(x)=148021541)
+ 2500 val loss 7.3635
+ 2500 val perplexity 1577.3666
+ 2500 train 7.257758 (lr=2.6494e-05) (hash(x)=141356608)
+ 2600 val loss 7.3427
+ 2600 val perplexity 1544.8636
+ 2600 train 7.310372 (lr=2.6198e-05) (hash(x)=146005217)
+ 2700 val loss 7.3305
+ 2700 val perplexity 1526.0874
+ 2700 train 7.176082 (lr=2.5892e-05) (hash(x)=144511718)
+ 2800 val loss 7.3217
+ 2800 val perplexity 1512.8278
+ 2800 train 7.203008 (lr=2.5576e-05) (hash(x)=146019502)
+ 2900 val loss 7.3115
+ 2900 val perplexity 1497.4431
+ 2900 train 7.208506 (lr=2.5251e-05) (hash(x)=146496200)
+ 3000 val loss 7.3051
+ 3000 val perplexity 1487.8900
+ 3000 train 7.237123 (lr=2.4917e-05) (hash(x)=150127281)
+ 3100 val loss 7.2795
+ 3100 val perplexity 1450.1936
+ 3100 train 7.217702 (lr=2.4574e-05) (hash(x)=142022255)
+ 3200 val loss 7.2674
+ 3200 val perplexity 1432.8917
+ 3200 train 7.348095 (lr=2.4224e-05) (hash(x)=154120875)
+ 3300 val loss 7.2577
+ 3300 val perplexity 1418.9600
+ 3300 train 7.289066 (lr=2.3865e-05) (hash(x)=153999717)
+ 3400 val loss 7.2510
+ 3400 val perplexity 1409.4484
+ 3400 train 7.061343 (lr=2.3498e-05) (hash(x)=139694097)
+ 3500 val loss 7.2386
+ 3500 val perplexity 1392.1400
+ 3500 train 7.426363 (lr=2.3125e-05) (hash(x)=162992732)
+ 3600 val loss 7.2288
+ 3600 val perplexity 1378.5492
+ 3600 train 7.214731 (lr=2.2744e-05) (hash(x)=147574101)
+ 3700 val loss 7.2263
+ 3700 val perplexity 1375.1694
+ 3700 train 7.351956 (lr=2.2357e-05) (hash(x)=157763099)
+ 3800 val loss 7.2151
+ 3800 val perplexity 1359.7964
+ 3800 train 7.383568 (lr=2.1965e-05) (hash(x)=170800034)
+ 3900 val loss 7.2085
+ 3900 val perplexity 1350.8314
+ 3900 train 7.289079 (lr=2.1566e-05) (hash(x)=164984528)
+ 4000 val loss 7.2007
+ 4000 val perplexity 1340.3755
+ 4000 train 7.101239 (lr=2.1162e-05) (hash(x)=141743323)
+ 4100 val loss 7.1957
+ 4100 val perplexity 1333.7296
+ 4100 train 7.226369 (lr=2.0754e-05) (hash(x)=153392872)
+ 4200 val loss 7.1954
+ 4200 val perplexity 1333.2660
+ 4200 train 7.106156 (lr=2.0341e-05) (hash(x)=149074933)
+ 4300 val loss 7.1858
+ 4300 val perplexity 1320.5094
+ 4300 train 7.569630 (lr=1.9924e-05) (hash(x)=167823423)
+ 4400 val loss 7.1814
+ 4400 val perplexity 1314.7053
+ 4400 train 6.916952 (lr=1.9504e-05) (hash(x)=141203114)
+ 4500 val loss 7.1717
+ 4500 val perplexity 1302.1022
+ 4500 train 7.153078 (lr=1.9081e-05) (hash(x)=146284780)
+ 4600 val loss 7.1632
+ 4600 val perplexity 1291.0570
+ 4600 train 6.965153 (lr=1.8655e-05) (hash(x)=141126464)
+ 4700 val loss 7.1603
+ 4700 val perplexity 1287.3342
+ 4700 train 7.168801 (lr=1.8226e-05) (hash(x)=154751926)
+ 4800 val loss 7.1503
+ 4800 val perplexity 1274.5387
+ 4800 train 7.250330 (lr=1.7796e-05) (hash(x)=154793198)
+ 4900 val loss 7.1554
+ 4900 val perplexity 1280.9568
+ 4900 train 6.922604 (lr=1.7365e-05) (hash(x)=139406392)
+ 5000 val loss 7.1570
+ 5000 val perplexity 1283.1031
+ 5000 train 6.976234 (lr=1.6933e-05) (hash(x)=153548741)
+ 5100 val loss 7.1439
+ 5100 val perplexity 1266.3490
+ 5100 train 7.196578 (lr=1.6500e-05) (hash(x)=160488568)
+ 5200 val loss 7.1361
+ 5200 val perplexity 1256.5380
+ 5200 train 7.132746 (lr=1.6067e-05) (hash(x)=149645053)
+ 5300 val loss 7.1230
+ 5300 val perplexity 1240.2175
+ 5300 train 7.209235 (lr=1.5635e-05) (hash(x)=155820556)
+ 5400 val loss 7.1175
+ 5400 val perplexity 1233.3306
+ 5400 train 7.046651 (lr=1.5204e-05) (hash(x)=147538134)
+ 5500 val loss 7.1149
+ 5500 val perplexity 1230.1085
+ 5500 train 7.330192 (lr=1.4774e-05) (hash(x)=166889307)
+ 5600 val loss 7.1078
+ 5600 val perplexity 1221.4172
+ 5600 train 6.847104 (lr=1.4345e-05) (hash(x)=139516699)
+ 5700 val loss 7.1062
+ 5700 val perplexity 1219.5432
+ 5700 train 6.822536 (lr=1.3919e-05) (hash(x)=140453511)
+ 5800 val loss 7.1072
+ 5800 val perplexity 1220.6708
+ 5800 train 7.080517 (lr=1.3496e-05) (hash(x)=162964847)
+ 5900 val loss 7.0968
+ 5900 val perplexity 1208.1351
+ 5900 train 7.113507 (lr=1.3076e-05) (hash(x)=150606634)
+ 6000 val loss 7.0894
+ 6000 val perplexity 1199.1615
+ 6000 train 7.226833 (lr=1.2659e-05) (hash(x)=149890857)
+ 6100 val loss 7.0855
+ 6100 val perplexity 1194.4796
+ 6100 train 7.195603 (lr=1.2246e-05) (hash(x)=173884145)
+ 6200 val loss 7.0843
+ 6200 val perplexity 1193.1288
+ 6200 train 7.085426 (lr=1.1838e-05) (hash(x)=151987098)
+ 6300 val loss 7.0782
+ 6300 val perplexity 1185.8658
+ 6300 train 7.000040 (lr=1.1434e-05) (hash(x)=148853562)
+ 6400 val loss 7.0746
+ 6400 val perplexity 1181.5142
+ 6400 train 6.854797 (lr=1.1035e-05) (hash(x)=141530101)
+ 6500 val loss 7.0722
+ 6500 val perplexity 1178.6820
+ 6500 train 6.931275 (lr=1.0643e-05) (hash(x)=142297809)
+ 6600 val loss 7.0652
+ 6600 val perplexity 1170.4662
+ 6600 train 6.943073 (lr=1.0256e-05) (hash(x)=142447782)
+ 6700 val loss 7.0627
+ 6700 val perplexity 1167.6399
+ 6700 train 6.990753 (lr=9.8753e-06) (hash(x)=147004686)
+ 6800 val loss 7.0588
+ 6800 val perplexity 1163.0273
+ 6800 train 6.815724 (lr=9.5017e-06) (hash(x)=133438702)
+ 6900 val loss 7.0579
+ 6900 val perplexity 1161.9547
+ 6900 train 7.089042 (lr=9.1353e-06) (hash(x)=157085143)
+ 7000 val loss 7.0565
+ 7000 val perplexity 1160.3512
+ 7000 train 6.977046 (lr=8.7764e-06) (hash(x)=139437666)
+ 7100 val loss 7.0522
+ 7100 val perplexity 1155.3442
+ 7100 train 7.186143 (lr=8.4255e-06) (hash(x)=159792986)
+ 7200 val loss 7.0482
+ 7200 val perplexity 1150.7938
+ 7200 train 7.010453 (lr=8.0829e-06) (hash(x)=144930687)
+ 7300 val loss 7.0445
+ 7300 val perplexity 1146.4812
+ 7300 train 7.140266 (lr=7.7489e-06) (hash(x)=156242690)
+ 7400 val loss 7.0404
+ 7400 val perplexity 1141.8557
+ 7400 train 6.914301 (lr=7.4239e-06) (hash(x)=148183719)
+ 7500 val loss 7.0422
+ 7500 val perplexity 1143.8459
+ 7500 train 7.057545 (lr=7.1083e-06) (hash(x)=152494758)
+ 7600 val loss 7.0377
+ 7600 val perplexity 1138.7130
+ 7600 train 6.802165 (lr=6.8023e-06) (hash(x)=142485027)
+ 7700 val loss 7.0353
+ 7700 val perplexity 1136.0083
+ 7700 train 6.900186 (lr=6.5062e-06) (hash(x)=147512165)
+ 7800 val loss 7.0288
+ 7800 val perplexity 1128.7206
+ 7800 train 7.030202 (lr=6.2205e-06) (hash(x)=160346994)
+ 7900 val loss 7.0283
+ 7900 val perplexity 1128.1121
+ 7900 train 6.884702 (lr=5.9453e-06) (hash(x)=144488254)
+ 8000 val loss 7.0247
+ 8000 val perplexity 1124.0129
+ 8000 train 6.848904 (lr=5.6809e-06) (hash(x)=147637019)
+ 8100 val loss 7.0236
+ 8100 val perplexity 1122.7905
+ 8100 train 6.902358 (lr=5.4277e-06) (hash(x)=147340534)
+ 8200 val loss 7.0195
+ 8200 val perplexity 1118.2729
+ 8200 train 7.050259 (lr=5.1858e-06) (hash(x)=151630665)
+ 8300 val loss 7.0165
+ 8300 val perplexity 1114.8240
+ 8300 train 7.053640 (lr=4.9556e-06) (hash(x)=149747064)
+ 8400 val loss 7.0157
+ 8400 val perplexity 1114.0216
+ 8400 train 7.202093 (lr=4.7372e-06) (hash(x)=154245770)
+ 8500 val loss 7.0118
+ 8500 val perplexity 1109.6001
+ 8500 train 6.901859 (lr=4.5309e-06) (hash(x)=152559100)
+ 8600 val loss 7.0098
+ 8600 val perplexity 1107.4208
+ 8600 train 7.606647 (lr=4.3369e-06) (hash(x)=181365926)
+ 8700 val loss 7.0080
+ 8700 val perplexity 1105.4908
+ 8700 train 6.896190 (lr=4.1554e-06) (hash(x)=154405991)
+ 8800 val loss 7.0098
+ 8800 val perplexity 1107.4694
+ 8800 train 6.973182 (lr=3.9866e-06) (hash(x)=153755904)
+ 8900 val loss 7.0061
+ 8900 val perplexity 1103.3048
+ 8900 train 6.954730 (lr=3.8307e-06) (hash(x)=152120568)
+ 9000 val loss 7.0017
+ 9000 val perplexity 1098.5304
+ 9000 train 6.823189 (lr=3.6877e-06) (hash(x)=142797279)
+ 9100 val loss 7.0008
+ 9100 val perplexity 1097.4760
+ 9100 train 6.885849 (lr=3.5580e-06) (hash(x)=143037503)
+ 9200 val loss 7.0024
+ 9200 val perplexity 1099.3127
+ 9200 train 6.753475 (lr=3.4415e-06) (hash(x)=113690273)
+ 9300 val loss 6.9984
+ 9300 val perplexity 1094.8677
+ 9300 train 6.957781 (lr=3.3385e-06) (hash(x)=158025077)
+ 9400 val loss 6.9974
+ 9400 val perplexity 1093.8115
+ 9400 train 7.103577 (lr=3.2490e-06) (hash(x)=158251718)
+ 9500 val loss 6.9958
+ 9500 val perplexity 1091.9963
+ 9500 train 7.059623 (lr=3.1730e-06) (hash(x)=154752610)
+ 9600 val loss 6.9932
+ 9600 val perplexity 1089.1622
+ 9600 train 6.882071 (lr=3.1108e-06) (hash(x)=146889093)
+ 9700 val loss 6.9916
+ 9700 val perplexity 1087.4290
+ 9700 train 6.967501 (lr=3.0624e-06) (hash(x)=156906516)
+ 9800 val loss 6.9940
+ 9800 val perplexity 1090.0704
+ 9800 train 6.878489 (lr=3.0277e-06) (hash(x)=153841927)
+ 9900 val loss 6.9915
+ 9900 val perplexity 1087.3087
+ 9900 train 7.202410 (lr=3.0069e-06) (hash(x)=163514334)
+ 9999 val loss 6.9889
+ 9999 val perplexity 1084.5234
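Two sanity checks the log supports: the logged val perplexity is exp(val loss), and the early lr entries match a linear warmup of max_lr * (step + 1) / warmup_steps with the configured warmup_steps = 200. A small verification sketch using values copied from the logs above:

import math

# (val loss, val perplexity) pairs taken from the log.
pairs = [
    (11.7850, 131263.8281),  # step 0 (both runs)
    (6.8432, 937.4626),      # step 9999, old run (max_lr=5e-05)
    (6.9889, 1084.5234),     # step 9999, new run (max_lr=3e-05)
]
for loss, ppl in pairs:
    # Losses are logged to 4 decimals, so allow a small relative tolerance.
    assert math.isclose(math.exp(loss), ppl, rel_tol=1e-3), (loss, ppl)

# Warmup: logged lr at steps 0 and 100 matches max_lr * (step + 1) / 200.
for max_lr, step, logged_lr in [(5e-05, 0, 2.5000e-07), (5e-05, 100, 2.5250e-05),
                                (3e-05, 0, 1.5000e-07), (3e-05, 100, 1.5150e-05)]:
    assert math.isclose(max_lr * (step + 1) / 200, logged_lr, rel_tol=1e-4)

After warmup, the lr entries in both runs decay toward roughly a tenth of max_lr by the final step (5.0116e-06 and 3.0069e-06), consistent with the decay_lr: true setting.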
attention_kindself_n_heads2_seed1340/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:eeafc887e2b94e00e67301b46f102ad80b7f69a47c0c0b11fbd2f5a8beba5926
+ oid sha256:088258a4f8fd3be214980af9d2ef27f2f3aa5cb5af03252f3793f6cf1bfd3869
  size 38587970
attention_kindself_n_heads2_seed1340/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2e8898dbf3192822e6299f0bec848a16efb02ed628a1e23c7d9b4f7984fcad87
+ oid sha256:1f8b9e376f6482531687d21b299a121c6f4a647ede801aed168e10c4a06c4243
  size 38587970
attention_kindself_n_heads2_seed1340/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6dfeb80636e73201c22bb773d245c98071d88c0d13052531b6f3ca0d9fef7edd
+ oid sha256:fa886345b801b69a0727eab86b2687a5f01f51c3b6e9874b046494b92f117d7a
  size 38587970
attention_kindself_n_heads2_seed1340/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5189836419cd0484019a1c8cee8ae4fec805583887d635e19f734ea16d7c6384
+ oid sha256:7173d9f35788c5405e1623d6f9f1f57c39c9d65c667224446e38e237ad5c5841
  size 38587970
attention_kindself_n_heads2_seed1340/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:95b6bdbfcfdb4f35baaf224d1e043e65f6bc2aec1ddda4ba3f86254645341eb6
+ oid sha256:f3cf4f6f719b5c7b8009447c8226a11d145a35a498618f4c4d34fc60aaeb71cd
  size 70895430
attention_kindself_n_heads2_seed1340/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c69a30dbbded26c6a2498ef5b43fa6bd6afeae0c6f415a5e730c6a1a2e13870f
+ oid sha256:a54b7093531e4c2e5d10e3db0fc53fd1b3ca2ec04326afd46252957e072c4ec7
  size 70895430
attention_kindself_n_heads2_seed1340/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:285dba5d57b4ec14f08adc6ba58835a40ee8d442ca5d25624533f4d1e4de3fc2
+ oid sha256:201b2134aebf45cba772f7a389eff837316a92202158e887da9b4328a1424825
  size 70895430
attention_kindself_n_heads2_seed1340/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5e9f4f54f624117870c364ef89868cedc0a6799547ba5105a8d754a8abbc702e
+ oid sha256:7ec75c708b8273b9f72529fc6f66c09bd97afab9cddeb7c135333bf95e60c724
  size 70895430
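Each checkpoint entry above is a Git LFS pointer rather than the checkpoint itself: the oid line carries the SHA-256 of the stored blob and the size line its byte count, so this commit swaps in new model/optimizer weights while every file size stays fixed. A minimal sketch, with hypothetical local paths, for verifying a downloaded blob against its pointer:

import hashlib
import os

def verify_lfs_pointer(pointer_path, blob_path):
    """Check a blob's SHA-256 and byte size against its Git LFS pointer file."""
    with open(pointer_path) as f:
        fields = dict(line.split(" ", 1) for line in f.read().splitlines())
    expected_oid = fields["oid"].split(":", 1)[1]  # "sha256:<hex>" -> "<hex>"
    expected_size = int(fields["size"])
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid and os.path.getsize(blob_path) == expected_size

# Hypothetical filenames for a pointer saved locally next to the real blob.
print(verify_lfs_pointer("model_09999.pt.pointer", "model_09999.pt"))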