SAELens
CallumMcDougallGDM committed
Commit f9ffa70 · verified · 1 parent: f78f8c7

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes.
Files changed (50). Each folder name encodes the hook family, layer index, SAE width, and L0 bucket; see the parsing sketch after this list.
  1. mlp_out/layer_13_width_16k_l0_big/config.json +10 -0
  2. mlp_out/layer_13_width_16k_l0_medium/config.json +10 -0
  3. mlp_out/layer_13_width_16k_l0_small/config.json +10 -0
  4. mlp_out/layer_13_width_1m_l0_big/config.json +10 -0
  5. mlp_out/layer_13_width_1m_l0_medium/config.json +10 -0
  6. mlp_out/layer_13_width_1m_l0_small/config.json +10 -0
  7. mlp_out/layer_13_width_262k_l0_big/config.json +10 -0
  8. mlp_out/layer_13_width_262k_l0_medium/config.json +10 -0
  9. mlp_out/layer_13_width_262k_l0_small/config.json +10 -0
  10. mlp_out/layer_13_width_65k_l0_big/config.json +10 -0
  11. mlp_out/layer_13_width_65k_l0_medium/config.json +10 -0
  12. mlp_out/layer_17_width_16k_l0_medium/config.json +10 -0
  13. mlp_out/layer_17_width_16k_l0_small/config.json +10 -0
  14. mlp_out/layer_17_width_1m_l0_big/config.json +10 -0
  15. mlp_out/layer_17_width_1m_l0_medium/config.json +10 -0
  16. mlp_out/layer_17_width_1m_l0_small/config.json +10 -0
  17. mlp_out/layer_17_width_262k_l0_big/config.json +10 -0
  18. mlp_out/layer_17_width_262k_l0_medium/config.json +10 -0
  19. mlp_out/layer_17_width_262k_l0_small/config.json +10 -0
  20. mlp_out/layer_17_width_65k_l0_big/config.json +10 -0
  21. mlp_out/layer_17_width_65k_l0_medium/config.json +10 -0
  22. mlp_out/layer_17_width_65k_l0_small/config.json +10 -0
  23. mlp_out/layer_22_width_16k_l0_big/config.json +10 -0
  24. mlp_out/layer_22_width_16k_l0_small/config.json +10 -0
  25. mlp_out/layer_22_width_1m_l0_big/config.json +10 -0
  26. mlp_out/layer_22_width_1m_l0_medium/config.json +10 -0
  27. mlp_out/layer_22_width_1m_l0_small/config.json +10 -0
  28. mlp_out/layer_22_width_262k_l0_big/config.json +10 -0
  29. mlp_out/layer_22_width_262k_l0_medium/config.json +10 -0
  30. mlp_out/layer_22_width_262k_l0_small/config.json +10 -0
  31. mlp_out/layer_22_width_65k_l0_big/config.json +10 -0
  32. mlp_out/layer_22_width_65k_l0_medium/config.json +10 -0
  33. mlp_out/layer_22_width_65k_l0_small/config.json +10 -0
  34. mlp_out/layer_7_width_16k_l0_big/config.json +10 -0
  35. mlp_out/layer_7_width_16k_l0_medium/config.json +10 -0
  36. mlp_out/layer_7_width_16k_l0_small/config.json +10 -0
  37. mlp_out/layer_7_width_1m_l0_big/config.json +10 -0
  38. mlp_out/layer_7_width_1m_l0_medium/config.json +10 -0
  39. mlp_out/layer_7_width_1m_l0_small/config.json +10 -0
  40. mlp_out/layer_7_width_262k_l0_big/config.json +10 -0
  41. mlp_out/layer_7_width_262k_l0_medium/config.json +10 -0
  42. mlp_out/layer_7_width_262k_l0_small/config.json +10 -0
  43. mlp_out/layer_7_width_65k_l0_big/config.json +10 -0
  44. mlp_out/layer_7_width_65k_l0_medium/config.json +10 -0
  45. mlp_out/layer_7_width_65k_l0_small/config.json +10 -0
  46. mlp_out_all/layer_12_width_262k_l0_small/config.json +10 -0
  47. mlp_out_all/layer_14_width_262k_l0_big/config.json +10 -0
  48. mlp_out_all/layer_16_width_16k_l0_small/config.json +10 -0
  49. mlp_out_all/layer_19_width_262k_l0_big/config.json +10 -0
  50. mlp_out_all/layer_1_width_262k_l0_big/config.json +10 -0
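The folder names above follow a regular pattern, family/layer_L_width_W_l0_bucket/config.json, where the width tag (16k, 65k, 262k, 1m) and the L0 bucket (small, medium, big) correspond to the "width" and "l0" fields in each config. Below is a minimal Python sketch that decodes such a path; the helper, its regex, and the returned field names are illustrative assumptions rather than anything shipped in this repo.

import re

def parse_sae_path(path: str) -> dict:
    # Hypothetical helper: decode e.g. "mlp_out/layer_13_width_16k_l0_big/config.json".
    pattern = r"(?P<family>[^/]+)/layer_(?P<layer>\d+)_width_(?P<width>[^_/]+)_l0_(?P<bucket>[^_/]+)/config\.json$"
    m = re.match(pattern, path)
    if m is None:
        raise ValueError(f"unrecognised SAE path: {path}")
    return {
        "hook_family": m["family"],   # "mlp_out" or "mlp_out_all"
        "layer": int(m["layer"]),     # transformer block index
        "width_tag": m["width"],      # "16k", "65k", "262k", "1m"
        "l0_bucket": m["bucket"],     # "small", "medium", "big"
    }

print(parse_sae_path("mlp_out/layer_13_width_16k_l0_big/config.json"))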
mlp_out/layer_13_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
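Each config points the SAE's input and output at the same Hugging Face hook point on google/gemma-3-1b-it. The sketch below is a rough illustration rather than the SAELens loading API: it assumes the hook point string is a submodule path with a trailing ".output" marker, and it caches the activations this SAE would encode using a plain transformers forward hook.

import json
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

with open("mlp_out/layer_13_width_16k_l0_big/config.json") as f:
    cfg = json.load(f)

model = AutoModelForCausalLM.from_pretrained(cfg["model_name"], torch_dtype=torch.bfloat16)
tokenizer = AutoTokenizer.from_pretrained(cfg["model_name"])

# Assumption: dropping the trailing ".output" yields the module whose forward output is hooked.
module = model.get_submodule(cfg["hf_hook_point_in"].removesuffix(".output"))

cache = {}

def grab_activations(_module, _inputs, output):
    # Cache the activations at the hooked site: shape (batch, seq_len, hidden_size).
    cache["sae_input"] = output.detach()

handle = module.register_forward_hook(grab_activations)
with torch.no_grad():
    model(**tokenizer("The quick brown fox", return_tensors="pt"))
handle.remove()

print(cache["sae_input"].shape)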
mlp_out/layer_13_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_1m_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_1m_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_1m_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_262k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_13_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.13.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.13.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_1m_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_1m_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_1m_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_262k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_17_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_1m_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_1m_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_1m_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_262k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_22_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 132,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 54,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 18,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_1m_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 132,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_1m_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 54,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_1m_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 1048576,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 18,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 132,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 54,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_262k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 18,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 132,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 54,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out/layer_7_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.7.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 18,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out_all/layer_12_width_262k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.12.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.12.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out_all/layer_14_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.14.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.14.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 120,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out_all/layer_16_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.16.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.16.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out_all/layer_19_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.19.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.19.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 120,
+ "affine_connection": false,
+ "type": "sae"
+ }
mlp_out_all/layer_1_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.1.post_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.1.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-1b-it",
+ "architecture": "jump_relu",
+ "l0": 66,
+ "affine_connection": false,
+ "type": "sae"
+ }