Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- attn_out_all/layer_10_width_262k_l0_small/config.json +10 -0
- attn_out_all/layer_11_width_262k_l0_big/config.json +10 -0
- attn_out_all/layer_12_width_16k_l0_big/config.json +10 -0
- attn_out_all/layer_12_width_262k_l0_small/config.json +10 -0
- attn_out_all/layer_13_width_262k_l0_small/config.json +10 -0
- attn_out_all/layer_14_width_262k_l0_big/config.json +10 -0
- attn_out_all/layer_15_width_16k_l0_small/config.json +10 -0
- attn_out_all/layer_16_width_16k_l0_small/config.json +10 -0
- attn_out_all/layer_16_width_262k_l0_big/config.json +10 -0
- attn_out_all/layer_18_width_16k_l0_small/config.json +10 -0
- attn_out_all/layer_19_width_262k_l0_big/config.json +10 -0
- attn_out_all/layer_1_width_16k_l0_big/config.json +10 -0
- attn_out_all/layer_1_width_16k_l0_small/config.json +10 -0
- attn_out_all/layer_1_width_262k_l0_big/config.json +10 -0
- attn_out_all/layer_20_width_16k_l0_big/config.json +10 -0
- attn_out_all/layer_21_width_262k_l0_big/config.json +10 -0
- attn_out_all/layer_23_width_16k_l0_small/config.json +10 -0
- attn_out_all/layer_24_width_16k_l0_big/config.json +10 -0
- attn_out_all/layer_25_width_16k_l0_small/config.json +10 -0
- attn_out_all/layer_3_width_262k_l0_big/config.json +10 -0
- attn_out_all/layer_4_width_262k_l0_small/config.json +10 -0
- attn_out_all/layer_6_width_16k_l0_big/config.json +10 -0
- attn_out_all/layer_6_width_262k_l0_big/config.json +10 -0
- attn_out_all/layer_8_width_16k_l0_small/config.json +10 -0
- attn_out_all/layer_9_width_16k_l0_big/config.json +10 -0
- mlp_out/layer_13_width_65k_l0_small/config.json +10 -0
- mlp_out/layer_17_width_16k_l0_big/config.json +10 -0
- mlp_out/layer_22_width_16k_l0_medium/config.json +10 -0
- mlp_out_all/layer_0_width_16k_l0_big/config.json +10 -0
- mlp_out_all/layer_0_width_16k_l0_small/config.json +10 -0
- mlp_out_all/layer_0_width_262k_l0_big/config.json +10 -0
- mlp_out_all/layer_0_width_262k_l0_small/config.json +10 -0
- mlp_out_all/layer_10_width_16k_l0_big/config.json +10 -0
- mlp_out_all/layer_10_width_16k_l0_small/config.json +10 -0
- mlp_out_all/layer_10_width_262k_l0_big/config.json +10 -0
- mlp_out_all/layer_10_width_262k_l0_small/config.json +10 -0
- mlp_out_all/layer_11_width_16k_l0_big/config.json +10 -0
- mlp_out_all/layer_11_width_16k_l0_small/config.json +10 -0
- mlp_out_all/layer_11_width_262k_l0_big/config.json +10 -0
- mlp_out_all/layer_11_width_262k_l0_small/config.json +10 -0
- mlp_out_all/layer_12_width_16k_l0_big/config.json +10 -0
- mlp_out_all/layer_12_width_16k_l0_small/config.json +10 -0
- mlp_out_all/layer_12_width_262k_l0_big/config.json +10 -0
- mlp_out_all/layer_13_width_16k_l0_big/config.json +10 -0
- mlp_out_all/layer_13_width_16k_l0_small/config.json +10 -0
- mlp_out_all/layer_13_width_262k_l0_big/config.json +10 -0
- mlp_out_all/layer_13_width_262k_l0_small/config.json +10 -0
- mlp_out_all/layer_14_width_16k_l0_big/config.json +10 -0
- mlp_out_all/layer_14_width_16k_l0_small/config.json +10 -0
- mlp_out_all/layer_14_width_262k_l0_small/config.json +10 -0
attn_out_all/layer_10_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.10.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.10.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
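
Each config pins an SAE to a Hugging Face module path rather than a TransformerLens hook name: a path ending in ".input" names the tensor entering that module, and ".output" the tensor leaving it. Below is a minimal sketch of consuming such a config; the capture helper is hypothetical, but it relies only on standard PyTorch and transformers APIs (get_submodule plus forward hooks).

import json

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

def capture(model, hook_point, inputs):
    # Split e.g. "model.layers.10.self_attn.o_proj.input" into the
    # dotted module path and the direction ("input" or "output").
    path, kind = hook_point.rsplit(".", 1)
    module = model.get_submodule(path)
    store = {}
    if kind == "input":
        def hook(mod, args):  # pre-hook: sees the module's positional inputs
            store["act"] = args[0].detach()
        handle = module.register_forward_pre_hook(hook)
    else:
        def hook(mod, args, out):  # forward hook: sees the module's output
            store["act"] = out.detach()
        handle = module.register_forward_hook(hook)
    with torch.no_grad():
        model(**inputs)
    handle.remove()
    return store["act"]

cfg = json.load(open("attn_out_all/layer_10_width_262k_l0_small/config.json"))
tok = AutoTokenizer.from_pretrained(cfg["model_name"])
model = AutoModelForCausalLM.from_pretrained(cfg["model_name"])
acts = capture(model, cfg["hf_hook_point_in"], tok("Hello world", return_tensors="pt"))
print(acts.shape)  # (batch, seq, d): the tensor feeding o_proj

Note that hf_hook_point_in and hf_hook_point_out are identical in every config here, so each SAE reads and reconstructs the same activation.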
attn_out_all/layer_11_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.11.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.11.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_12_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.12.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.12.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_12_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.12.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.12.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_13_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.13.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.13.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_14_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.14.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.14.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_15_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.15.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.15.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_16_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.16.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.16.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_16_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.16.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.16.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_18_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.18.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.18.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_19_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.19.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.19.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_1_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.1.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.1.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 66,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_1_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.1.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.1.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 11,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_1_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.1.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.1.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 66,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_20_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.20.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.20.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_21_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.21.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.21.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_23_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.23.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.23.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_24_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.24.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.24.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_25_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.25.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.25.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_3_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.3.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.3.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 80,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_4_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.4.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.4.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 14,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_6_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.6.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.6.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 101,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_6_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.6.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.6.self_attn.o_proj.input",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 101,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_8_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.8.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.8.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 19,
+    "affine_connection": false,
+    "type": "sae"
+}
attn_out_all/layer_9_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.9.self_attn.o_proj.input",
+    "hf_hook_point_out": "model.layers.9.self_attn.o_proj.input",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out/layer_13_width_65k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+    "width": 65536,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
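
The mlp_out and mlp_out_all configs hook model.layers.N.post_feedforward_layernorm.output instead of the attention o_proj input, but the schema is the same: "architecture": "jump_relu", a dictionary size "width", and an "l0" giving roughly how many latents fire per token. The sketch below is an illustrative JumpReLU reconstruction consistent with those fields, not necessarily the uploaded implementation; in particular, "affine_connection": false is read here as "no affine skip path from input to reconstruction".

import torch
import torch.nn as nn

class JumpReLUSAE(nn.Module):
    def __init__(self, d_in: int, width: int):
        super().__init__()
        self.W_enc = nn.Parameter(torch.zeros(d_in, width))
        self.b_enc = nn.Parameter(torch.zeros(width))
        self.W_dec = nn.Parameter(torch.zeros(width, d_in))
        self.b_dec = nn.Parameter(torch.zeros(d_in))
        # Learned per-latent threshold; training tunes it so the average
        # number of active latents per token lands near the config's "l0".
        self.threshold = nn.Parameter(torch.zeros(width))

    def encode(self, x):
        pre = x @ self.W_enc + self.b_enc
        # JumpReLU: keep a latent only where the pre-activation clears
        # its threshold, otherwise zero it out.
        return pre * (pre > self.threshold)

    def decode(self, f):
        # affine_connection: false -> plain linear decoder, no skip term.
        return f @ self.W_dec + self.b_dec

    def forward(self, x):
        return self.decode(self.encode(x))

# mlp_out/layer_13_width_65k_l0_small: 65536 latents over the layer-13
# post-feedforward layernorm output; d_in=1152 assumes Gemma-3-1b's hidden
# size and should be checked against the actual weight shapes.
sae = JumpReLUSAE(d_in=1152, width=65536)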
mlp_out/layer_17_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 150,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out/layer_22_width_16k_l0_medium/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 60,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_0_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.0.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.0.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 60,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_0_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.0.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.0.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 10,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_0_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.0.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.0.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 60,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_0_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.0.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.0.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 10,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_10_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.10.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.10.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_10_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.10.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.10.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_10_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.10.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.10.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_10_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.10.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.10.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_11_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.11.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.11.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_11_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.11.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.11.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_11_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.11.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.11.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_11_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.11.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.11.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_12_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.12.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.12.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_12_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.12.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.12.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_12_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.12.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.12.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_13_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_13_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_13_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_13_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_14_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.14.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.14.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 120,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_14_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.14.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.14.post_feedforward_layernorm.output",
+    "width": 16384,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
mlp_out_all/layer_14_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,10 @@
+{
+    "hf_hook_point_in": "model.layers.14.post_feedforward_layernorm.output",
+    "hf_hook_point_out": "model.layers.14.post_feedforward_layernorm.output",
+    "width": 262144,
+    "model_name": "google/gemma-3-1b-it",
+    "architecture": "jump_relu",
+    "l0": 20,
+    "affine_connection": false,
+    "type": "sae"
+}
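
Putting the two sketches together, reconstruction fidelity can be checked by splicing an SAE into the forward pass: a forward pre-hook that returns a tuple replaces the hooked module's inputs, so downstream computation sees the reconstruction instead of the original activation. Again a hedged sketch, reusing the hypothetical capture/JumpReLUSAE pieces above and assuming the SAE's d_in matches the hooked activation:

def splice(model, hook_point, sae):
    path, kind = hook_point.rsplit(".", 1)
    assert kind == "input"  # an ".output" splice would use register_forward_hook
    module = model.get_submodule(path)
    def hook(mod, args):
        # Returning a tuple from a forward pre-hook replaces the inputs.
        return (sae(args[0]),) + args[1:]
    return module.register_forward_pre_hook(hook)

handle = splice(model, cfg["hf_hook_point_in"], sae)
# ... compare losses or generations against the unhooked model here ...
handle.remove()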