Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- sparse_probing/matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2-random-10_matryoshka_google_gemma-2-2b_random_matryoshka_batch_top_k_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2-random-3_matryoshka_google_gemma-2-2b_random_matryoshka_batch_top_k_resid_post_layer_12_trainer_10_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +237 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_notemp_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +237 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_stop_grads_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +237 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_stop_grads_v2_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +670 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_1_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +237 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_1_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_5_custom_sae_eval_results.json +237 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_2_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +237 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_3_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +237 -0
- sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_3_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_4_custom_sae_eval_results.json +237 -0
- unlearning/matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_4_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_5_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_5_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_5_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2_custom_sae_eval_results.json +74 -0
- unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_4_custom_sae_eval_results.json +74 -0
sparse_probing/matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "0dc92dac-1418-418f-8932-c013e2c39613",
|
30 |
+
"datetime_epoch_millis": 1737727656385,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9596000395715237,
|
34 |
+
"llm_top_1_test_accuracy": 0.6610499999999999,
|
35 |
+
"llm_top_2_test_accuracy": 0.7224937499999999,
|
36 |
+
"llm_top_5_test_accuracy": 0.78105625,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9555250469595195,
|
44 |
+
"sae_top_1_test_accuracy": 0.7601125,
|
45 |
+
"sae_top_2_test_accuracy": 0.8060062499999999,
|
46 |
+
"sae_top_5_test_accuracy": 0.86121875,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436,
|
58 |
+
"llm_top_2_test_accuracy": 0.6871999999999999,
|
59 |
+
"llm_top_5_test_accuracy": 0.79,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9612000465393067,
|
65 |
+
"sae_top_1_test_accuracy": 0.7386,
|
66 |
+
"sae_top_2_test_accuracy": 0.7969999999999999,
|
67 |
+
"sae_top_5_test_accuracy": 0.9124000000000001,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9556000471115113,
|
76 |
+
"llm_top_1_test_accuracy": 0.673,
|
77 |
+
"llm_top_2_test_accuracy": 0.7253999999999999,
|
78 |
+
"llm_top_5_test_accuracy": 0.7652000000000001,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9500000357627869,
|
84 |
+
"sae_top_1_test_accuracy": 0.6818,
|
85 |
+
"sae_top_2_test_accuracy": 0.7712,
|
86 |
+
"sae_top_5_test_accuracy": 0.8176,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9290000319480896,
|
95 |
+
"llm_top_1_test_accuracy": 0.69,
|
96 |
+
"llm_top_2_test_accuracy": 0.7346000000000001,
|
97 |
+
"llm_top_5_test_accuracy": 0.7618,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9290000557899475,
|
103 |
+
"sae_top_1_test_accuracy": 0.7642,
|
104 |
+
"sae_top_2_test_accuracy": 0.8190000000000002,
|
105 |
+
"sae_top_5_test_accuracy": 0.8527999999999999,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9156000256538391,
|
114 |
+
"llm_top_1_test_accuracy": 0.6083999999999999,
|
115 |
+
"llm_top_2_test_accuracy": 0.6452,
|
116 |
+
"llm_top_5_test_accuracy": 0.6788000000000001,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.917400062084198,
|
122 |
+
"sae_top_1_test_accuracy": 0.7436,
|
123 |
+
"sae_top_2_test_accuracy": 0.7962,
|
124 |
+
"sae_top_5_test_accuracy": 0.8193999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000658035278,
|
133 |
+
"llm_top_1_test_accuracy": 0.671,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9705000519752502,
|
141 |
+
"sae_top_1_test_accuracy": 0.902,
|
142 |
+
"sae_top_2_test_accuracy": 0.902,
|
143 |
+
"sae_top_5_test_accuracy": 0.957,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9736000418663024,
|
152 |
+
"llm_top_1_test_accuracy": 0.6594,
|
153 |
+
"llm_top_2_test_accuracy": 0.6931999999999999,
|
154 |
+
"llm_top_5_test_accuracy": 0.7554000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9678000450134278,
|
160 |
+
"sae_top_1_test_accuracy": 0.6407999999999999,
|
161 |
+
"sae_top_2_test_accuracy": 0.6738,
|
162 |
+
"sae_top_5_test_accuracy": 0.7102,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9530000388622284,
|
171 |
+
"llm_top_1_test_accuracy": 0.6920000000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.77675,
|
173 |
+
"llm_top_5_test_accuracy": 0.8282499999999999,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9495000392198563,
|
179 |
+
"sae_top_1_test_accuracy": 0.7335,
|
180 |
+
"sae_top_2_test_accuracy": 0.76225,
|
181 |
+
"sae_top_5_test_accuracy": 0.82575,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.651,
|
191 |
+
"llm_top_2_test_accuracy": 0.7936,
|
192 |
+
"llm_top_5_test_accuracy": 0.9030000000000001,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9988000392913818,
|
198 |
+
"sae_top_1_test_accuracy": 0.8764,
|
199 |
+
"sae_top_2_test_accuracy": 0.9266,
|
200 |
+
"sae_top_5_test_accuracy": 0.9945999999999999,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "f2d1d982515d2dee706eb23a1ca459b308988764",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_2",
|
210 |
+
"sae_lens_version": "5.3.2",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 65536,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9440000653266907,
|
240 |
+
"1": 0.9570000171661377,
|
241 |
+
"2": 0.9480000734329224,
|
242 |
+
"6": 0.9830000400543213,
|
243 |
+
"9": 0.9740000367164612
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.57,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.788,
|
257 |
+
"9": 0.552
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.584,
|
261 |
+
"1": 0.667,
|
262 |
+
"2": 0.674,
|
263 |
+
"6": 0.8,
|
264 |
+
"9": 0.711
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.704,
|
269 |
+
"2": 0.762,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.865
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.589,
|
275 |
+
"1": 0.595,
|
276 |
+
"2": 0.781,
|
277 |
+
"6": 0.825,
|
278 |
+
"9": 0.903
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.661,
|
282 |
+
"1": 0.635,
|
283 |
+
"2": 0.793,
|
284 |
+
"6": 0.982,
|
285 |
+
"9": 0.914
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.897,
|
289 |
+
"1": 0.835,
|
290 |
+
"2": 0.913,
|
291 |
+
"6": 0.985,
|
292 |
+
"9": 0.932
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9610000252723694,
|
298 |
+
"13": 0.9550000429153442,
|
299 |
+
"14": 0.9430000185966492,
|
300 |
+
"18": 0.9320000410079956,
|
301 |
+
"19": 0.9590000510215759
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.968000054359436,
|
305 |
+
"13": 0.956000030040741,
|
306 |
+
"14": 0.9600000381469727,
|
307 |
+
"18": 0.9350000619888306,
|
308 |
+
"19": 0.9590000510215759
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.56,
|
312 |
+
"13": 0.676,
|
313 |
+
"14": 0.631,
|
314 |
+
"18": 0.699,
|
315 |
+
"19": 0.799
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.767,
|
319 |
+
"13": 0.714,
|
320 |
+
"14": 0.66,
|
321 |
+
"18": 0.722,
|
322 |
+
"19": 0.764
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.788,
|
326 |
+
"13": 0.751,
|
327 |
+
"14": 0.725,
|
328 |
+
"18": 0.733,
|
329 |
+
"19": 0.829
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.57,
|
333 |
+
"13": 0.685,
|
334 |
+
"14": 0.648,
|
335 |
+
"18": 0.663,
|
336 |
+
"19": 0.843
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.738,
|
340 |
+
"13": 0.674,
|
341 |
+
"14": 0.877,
|
342 |
+
"18": 0.716,
|
343 |
+
"19": 0.851
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.863,
|
347 |
+
"13": 0.746,
|
348 |
+
"14": 0.881,
|
349 |
+
"18": 0.74,
|
350 |
+
"19": 0.858
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9590000510215759,
|
356 |
+
"21": 0.9260000586509705,
|
357 |
+
"22": 0.9180000424385071,
|
358 |
+
"25": 0.9480000734329224,
|
359 |
+
"26": 0.8940000534057617
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9540000557899475,
|
363 |
+
"21": 0.9240000247955322,
|
364 |
+
"22": 0.9230000376701355,
|
365 |
+
"25": 0.9570000171661377,
|
366 |
+
"26": 0.8870000243186951
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.694,
|
370 |
+
"21": 0.771,
|
371 |
+
"22": 0.654,
|
372 |
+
"25": 0.705,
|
373 |
+
"26": 0.626
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.803,
|
377 |
+
"21": 0.778,
|
378 |
+
"22": 0.664,
|
379 |
+
"25": 0.756,
|
380 |
+
"26": 0.672
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.816,
|
384 |
+
"21": 0.792,
|
385 |
+
"22": 0.716,
|
386 |
+
"25": 0.798,
|
387 |
+
"26": 0.687
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.897,
|
391 |
+
"21": 0.563,
|
392 |
+
"22": 0.861,
|
393 |
+
"25": 0.893,
|
394 |
+
"26": 0.607
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.904,
|
398 |
+
"21": 0.799,
|
399 |
+
"22": 0.859,
|
400 |
+
"25": 0.882,
|
401 |
+
"26": 0.651
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.928,
|
405 |
+
"21": 0.835,
|
406 |
+
"22": 0.863,
|
407 |
+
"25": 0.9,
|
408 |
+
"26": 0.738
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9390000700950623,
|
414 |
+
"2": 0.9350000619888306,
|
415 |
+
"3": 0.9160000681877136,
|
416 |
+
"5": 0.9250000715255737,
|
417 |
+
"6": 0.8720000386238098
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9600000381469727,
|
421 |
+
"2": 0.9290000200271606,
|
422 |
+
"3": 0.909000039100647,
|
423 |
+
"5": 0.9200000166893005,
|
424 |
+
"6": 0.8600000143051147
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.665,
|
428 |
+
"2": 0.59,
|
429 |
+
"3": 0.61,
|
430 |
+
"5": 0.58,
|
431 |
+
"6": 0.597
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.733,
|
435 |
+
"2": 0.638,
|
436 |
+
"3": 0.592,
|
437 |
+
"5": 0.614,
|
438 |
+
"6": 0.649
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.763,
|
442 |
+
"2": 0.648,
|
443 |
+
"3": 0.664,
|
444 |
+
"5": 0.657,
|
445 |
+
"6": 0.662
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.85,
|
449 |
+
"2": 0.834,
|
450 |
+
"3": 0.647,
|
451 |
+
"5": 0.785,
|
452 |
+
"6": 0.602
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.895,
|
456 |
+
"2": 0.839,
|
457 |
+
"3": 0.637,
|
458 |
+
"5": 0.884,
|
459 |
+
"6": 0.726
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.896,
|
463 |
+
"2": 0.859,
|
464 |
+
"3": 0.698,
|
465 |
+
"5": 0.882,
|
466 |
+
"6": 0.762
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9700000286102295,
|
472 |
+
"5.0": 0.971000075340271
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9810000658035278,
|
476 |
+
"5.0": 0.9810000658035278
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.671,
|
480 |
+
"5.0": 0.671
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.902,
|
492 |
+
"5.0": 0.902
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.902,
|
496 |
+
"5.0": 0.902
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.957,
|
500 |
+
"5.0": 0.957
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9520000219345093,
|
506 |
+
"Python": 0.9810000658035278,
|
507 |
+
"HTML": 0.987000048160553,
|
508 |
+
"Java": 0.9630000591278076,
|
509 |
+
"PHP": 0.956000030040741
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9660000205039978,
|
513 |
+
"Python": 0.9880000352859497,
|
514 |
+
"HTML": 0.9890000224113464,
|
515 |
+
"Java": 0.9670000672340393,
|
516 |
+
"PHP": 0.9580000638961792
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.663,
|
520 |
+
"Python": 0.629,
|
521 |
+
"HTML": 0.78,
|
522 |
+
"Java": 0.628,
|
523 |
+
"PHP": 0.597
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.667,
|
527 |
+
"Python": 0.672,
|
528 |
+
"HTML": 0.791,
|
529 |
+
"Java": 0.682,
|
530 |
+
"PHP": 0.654
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.753,
|
534 |
+
"Python": 0.715,
|
535 |
+
"HTML": 0.905,
|
536 |
+
"Java": 0.726,
|
537 |
+
"PHP": 0.678
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.631,
|
541 |
+
"Python": 0.611,
|
542 |
+
"HTML": 0.707,
|
543 |
+
"Java": 0.658,
|
544 |
+
"PHP": 0.597
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.639,
|
548 |
+
"Python": 0.636,
|
549 |
+
"HTML": 0.81,
|
550 |
+
"Java": 0.665,
|
551 |
+
"PHP": 0.619
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.707,
|
555 |
+
"Python": 0.679,
|
556 |
+
"HTML": 0.888,
|
557 |
+
"Java": 0.662,
|
558 |
+
"PHP": 0.615
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9450000524520874,
|
564 |
+
"1": 0.9800000190734863,
|
565 |
+
"2": 0.9290000200271606,
|
566 |
+
"3": 0.9440000653266907
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9430000185966492,
|
570 |
+
"1": 0.9850000739097595,
|
571 |
+
"2": 0.9290000200271606,
|
572 |
+
"3": 0.9550000429153442
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.801,
|
576 |
+
"1": 0.661,
|
577 |
+
"2": 0.673,
|
578 |
+
"3": 0.633
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.804,
|
582 |
+
"1": 0.795,
|
583 |
+
"2": 0.689,
|
584 |
+
"3": 0.819
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.814,
|
588 |
+
"1": 0.883,
|
589 |
+
"2": 0.768,
|
590 |
+
"3": 0.848
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.641,
|
594 |
+
"1": 0.862,
|
595 |
+
"2": 0.763,
|
596 |
+
"3": 0.668
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.733,
|
600 |
+
"1": 0.852,
|
601 |
+
"2": 0.79,
|
602 |
+
"3": 0.674
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.817,
|
606 |
+
"1": 0.925,
|
607 |
+
"2": 0.79,
|
608 |
+
"3": 0.771
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.9980000257492065,
|
614 |
+
"fr": 1.0,
|
615 |
+
"de": 0.9980000257492065,
|
616 |
+
"es": 0.999000072479248,
|
617 |
+
"nl": 0.999000072479248
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 0.999000072479248,
|
622 |
+
"de": 0.999000072479248,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.746,
|
628 |
+
"fr": 0.593,
|
629 |
+
"de": 0.749,
|
630 |
+
"es": 0.504,
|
631 |
+
"nl": 0.663
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.825,
|
635 |
+
"fr": 0.595,
|
636 |
+
"de": 0.823,
|
637 |
+
"es": 0.969,
|
638 |
+
"nl": 0.756
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.89,
|
642 |
+
"fr": 0.927,
|
643 |
+
"de": 0.841,
|
644 |
+
"es": 0.985,
|
645 |
+
"nl": 0.872
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.848,
|
649 |
+
"fr": 0.993,
|
650 |
+
"de": 0.911,
|
651 |
+
"es": 0.874,
|
652 |
+
"nl": 0.756
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.844,
|
656 |
+
"fr": 0.991,
|
657 |
+
"de": 0.916,
|
658 |
+
"es": 0.883,
|
659 |
+
"nl": 0.999
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 1.0,
|
663 |
+
"fr": 0.996,
|
664 |
+
"de": 0.988,
|
665 |
+
"es": 0.991,
|
666 |
+
"nl": 0.998
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2-random-10_matryoshka_google_gemma-2-2b_random_matryoshka_batch_top_k_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 16,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "ddbfa36a-3495-4457-9417-b8e03b12a04c",
|
30 |
+
"datetime_epoch_millis": 1737070210596,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9587812922894955,
|
34 |
+
"llm_top_1_test_accuracy": 0.67096875,
|
35 |
+
"llm_top_2_test_accuracy": 0.7207812499999999,
|
36 |
+
"llm_top_5_test_accuracy": 0.7776125,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9490312915295362,
|
44 |
+
"sae_top_1_test_accuracy": 0.73191875,
|
45 |
+
"sae_top_2_test_accuracy": 0.78306875,
|
46 |
+
"sae_top_5_test_accuracy": 0.8452437500000001,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.966800057888031,
|
57 |
+
"llm_top_1_test_accuracy": 0.64,
|
58 |
+
"llm_top_2_test_accuracy": 0.696,
|
59 |
+
"llm_top_5_test_accuracy": 0.7876,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9576000332832336,
|
65 |
+
"sae_top_1_test_accuracy": 0.7698,
|
66 |
+
"sae_top_2_test_accuracy": 0.8141999999999999,
|
67 |
+
"sae_top_5_test_accuracy": 0.8478,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9496000409126282,
|
76 |
+
"llm_top_1_test_accuracy": 0.6706000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7243999999999999,
|
78 |
+
"llm_top_5_test_accuracy": 0.7584,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9438000559806824,
|
84 |
+
"sae_top_1_test_accuracy": 0.6848,
|
85 |
+
"sae_top_2_test_accuracy": 0.7776,
|
86 |
+
"sae_top_5_test_accuracy": 0.8628,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9318000435829162,
|
95 |
+
"llm_top_1_test_accuracy": 0.6944,
|
96 |
+
"llm_top_2_test_accuracy": 0.7378,
|
97 |
+
"llm_top_5_test_accuracy": 0.7484,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9192000269889832,
|
103 |
+
"sae_top_1_test_accuracy": 0.7212,
|
104 |
+
"sae_top_2_test_accuracy": 0.8089999999999999,
|
105 |
+
"sae_top_5_test_accuracy": 0.8338000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9166000366210938,
|
114 |
+
"llm_top_1_test_accuracy": 0.6068,
|
115 |
+
"llm_top_2_test_accuracy": 0.6372,
|
116 |
+
"llm_top_5_test_accuracy": 0.675,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9068000435829162,
|
122 |
+
"sae_top_1_test_accuracy": 0.6142000000000001,
|
123 |
+
"sae_top_2_test_accuracy": 0.6806,
|
124 |
+
"sae_top_5_test_accuracy": 0.755,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9815000295639038,
|
133 |
+
"llm_top_1_test_accuracy": 0.673,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9550000429153442,
|
141 |
+
"sae_top_1_test_accuracy": 0.767,
|
142 |
+
"sae_top_2_test_accuracy": 0.788,
|
143 |
+
"sae_top_5_test_accuracy": 0.903,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9716000437736512,
|
152 |
+
"llm_top_1_test_accuracy": 0.6624000000000001,
|
153 |
+
"llm_top_2_test_accuracy": 0.6916,
|
154 |
+
"llm_top_5_test_accuracy": 0.7562,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.966800045967102,
|
160 |
+
"sae_top_1_test_accuracy": 0.63,
|
161 |
+
"sae_top_2_test_accuracy": 0.6852,
|
162 |
+
"sae_top_5_test_accuracy": 0.7264,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9527500569820404,
|
171 |
+
"llm_top_1_test_accuracy": 0.69075,
|
172 |
+
"llm_top_2_test_accuracy": 0.75725,
|
173 |
+
"llm_top_5_test_accuracy": 0.8265,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9482500404119492,
|
179 |
+
"sae_top_1_test_accuracy": 0.79975,
|
180 |
+
"sae_top_2_test_accuracy": 0.81475,
|
181 |
+
"sae_top_5_test_accuracy": 0.8707499999999999,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.7298,
|
191 |
+
"llm_top_2_test_accuracy": 0.7979999999999999,
|
192 |
+
"llm_top_5_test_accuracy": 0.9028,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9948000431060791,
|
198 |
+
"sae_top_1_test_accuracy": 0.8686,
|
199 |
+
"sae_top_2_test_accuracy": 0.8952,
|
200 |
+
"sae_top_5_test_accuracy": 0.9624,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matryoshka_gemma-2-2b-16k-v2_matryoshka_google_gemma-2-2b_random_matryoshka_batch_top_k_resid_post_layer_12_trainer_0",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.937000036239624,
|
240 |
+
"1": 0.9550000429153442,
|
241 |
+
"2": 0.9430000185966492,
|
242 |
+
"6": 0.9790000319480896,
|
243 |
+
"9": 0.9740000367164612
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9530000686645508,
|
249 |
+
"6": 0.987000048160553,
|
250 |
+
"9": 0.9760000705718994
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.577,
|
254 |
+
"1": 0.614,
|
255 |
+
"2": 0.662,
|
256 |
+
"6": 0.786,
|
257 |
+
"9": 0.561
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.573,
|
261 |
+
"1": 0.662,
|
262 |
+
"2": 0.719,
|
263 |
+
"6": 0.811,
|
264 |
+
"9": 0.715
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.714,
|
268 |
+
"1": 0.712,
|
269 |
+
"2": 0.755,
|
270 |
+
"6": 0.895,
|
271 |
+
"9": 0.862
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.581,
|
275 |
+
"1": 0.635,
|
276 |
+
"2": 0.877,
|
277 |
+
"6": 0.839,
|
278 |
+
"9": 0.917
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.609,
|
282 |
+
"1": 0.673,
|
283 |
+
"2": 0.891,
|
284 |
+
"6": 0.981,
|
285 |
+
"9": 0.917
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.699,
|
289 |
+
"1": 0.735,
|
290 |
+
"2": 0.901,
|
291 |
+
"6": 0.988,
|
292 |
+
"9": 0.916
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9600000381469727,
|
298 |
+
"13": 0.9480000734329224,
|
299 |
+
"14": 0.9440000653266907,
|
300 |
+
"18": 0.9070000648498535,
|
301 |
+
"19": 0.9600000381469727
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9580000638961792,
|
305 |
+
"13": 0.9500000476837158,
|
306 |
+
"14": 0.9600000381469727,
|
307 |
+
"18": 0.9230000376701355,
|
308 |
+
"19": 0.9570000171661377
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.552,
|
312 |
+
"13": 0.673,
|
313 |
+
"14": 0.636,
|
314 |
+
"18": 0.697,
|
315 |
+
"19": 0.795
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.766,
|
319 |
+
"13": 0.71,
|
320 |
+
"14": 0.663,
|
321 |
+
"18": 0.728,
|
322 |
+
"19": 0.755
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.789,
|
326 |
+
"13": 0.74,
|
327 |
+
"14": 0.709,
|
328 |
+
"18": 0.721,
|
329 |
+
"19": 0.833
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.58,
|
333 |
+
"13": 0.664,
|
334 |
+
"14": 0.664,
|
335 |
+
"18": 0.71,
|
336 |
+
"19": 0.806
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.84,
|
340 |
+
"13": 0.658,
|
341 |
+
"14": 0.83,
|
342 |
+
"18": 0.729,
|
343 |
+
"19": 0.831
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.916,
|
347 |
+
"13": 0.862,
|
348 |
+
"14": 0.864,
|
349 |
+
"18": 0.83,
|
350 |
+
"19": 0.842
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9520000219345093,
|
356 |
+
"21": 0.9150000214576721,
|
357 |
+
"22": 0.8970000147819519,
|
358 |
+
"25": 0.9550000429153442,
|
359 |
+
"26": 0.8770000338554382
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9500000476837158,
|
363 |
+
"21": 0.9300000667572021,
|
364 |
+
"22": 0.9280000329017639,
|
365 |
+
"25": 0.9650000333786011,
|
366 |
+
"26": 0.8860000371932983
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.715,
|
370 |
+
"21": 0.774,
|
371 |
+
"22": 0.658,
|
372 |
+
"25": 0.695,
|
373 |
+
"26": 0.63
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.803,
|
377 |
+
"21": 0.754,
|
378 |
+
"22": 0.685,
|
379 |
+
"25": 0.76,
|
380 |
+
"26": 0.687
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.818,
|
384 |
+
"21": 0.796,
|
385 |
+
"22": 0.664,
|
386 |
+
"25": 0.788,
|
387 |
+
"26": 0.676
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.775,
|
391 |
+
"21": 0.785,
|
392 |
+
"22": 0.725,
|
393 |
+
"25": 0.705,
|
394 |
+
"26": 0.616
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.891,
|
398 |
+
"21": 0.804,
|
399 |
+
"22": 0.761,
|
400 |
+
"25": 0.86,
|
401 |
+
"26": 0.729
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.91,
|
405 |
+
"21": 0.84,
|
406 |
+
"22": 0.752,
|
407 |
+
"25": 0.894,
|
408 |
+
"26": 0.773
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9460000395774841,
|
414 |
+
"2": 0.9220000505447388,
|
415 |
+
"3": 0.9080000519752502,
|
416 |
+
"5": 0.9050000309944153,
|
417 |
+
"6": 0.8530000448226929
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9470000267028809,
|
421 |
+
"2": 0.9360000491142273,
|
422 |
+
"3": 0.9150000214576721,
|
423 |
+
"5": 0.9110000729560852,
|
424 |
+
"6": 0.8740000128746033
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.667,
|
428 |
+
"2": 0.615,
|
429 |
+
"3": 0.591,
|
430 |
+
"5": 0.57,
|
431 |
+
"6": 0.591
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.744,
|
435 |
+
"2": 0.635,
|
436 |
+
"3": 0.605,
|
437 |
+
"5": 0.581,
|
438 |
+
"6": 0.621
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.779,
|
442 |
+
"2": 0.643,
|
443 |
+
"3": 0.607,
|
444 |
+
"5": 0.661,
|
445 |
+
"6": 0.685
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.764,
|
449 |
+
"2": 0.586,
|
450 |
+
"3": 0.557,
|
451 |
+
"5": 0.539,
|
452 |
+
"6": 0.625
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.767,
|
456 |
+
"2": 0.625,
|
457 |
+
"3": 0.568,
|
458 |
+
"5": 0.716,
|
459 |
+
"6": 0.727
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.891,
|
463 |
+
"2": 0.815,
|
464 |
+
"3": 0.611,
|
465 |
+
"5": 0.712,
|
466 |
+
"6": 0.746
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9570000171661377,
|
472 |
+
"5.0": 0.9530000686645508
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9800000190734863,
|
476 |
+
"5.0": 0.9830000400543213
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.673,
|
480 |
+
"5.0": 0.673
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.767,
|
492 |
+
"5.0": 0.767
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.788,
|
496 |
+
"5.0": 0.788
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.903,
|
500 |
+
"5.0": 0.903
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9540000557899475,
|
506 |
+
"Python": 0.9810000658035278,
|
507 |
+
"HTML": 0.9830000400543213,
|
508 |
+
"Java": 0.9640000462532043,
|
509 |
+
"PHP": 0.9520000219345093
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.968000054359436,
|
513 |
+
"Python": 0.9830000400543213,
|
514 |
+
"HTML": 0.9830000400543213,
|
515 |
+
"Java": 0.9660000205039978,
|
516 |
+
"PHP": 0.9580000638961792
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.662,
|
520 |
+
"Python": 0.637,
|
521 |
+
"HTML": 0.793,
|
522 |
+
"Java": 0.629,
|
523 |
+
"PHP": 0.591
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.664,
|
527 |
+
"Python": 0.674,
|
528 |
+
"HTML": 0.799,
|
529 |
+
"Java": 0.685,
|
530 |
+
"PHP": 0.636
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.751,
|
534 |
+
"Python": 0.725,
|
535 |
+
"HTML": 0.902,
|
536 |
+
"Java": 0.731,
|
537 |
+
"PHP": 0.672
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.605,
|
541 |
+
"Python": 0.634,
|
542 |
+
"HTML": 0.683,
|
543 |
+
"Java": 0.628,
|
544 |
+
"PHP": 0.6
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.631,
|
548 |
+
"Python": 0.652,
|
549 |
+
"HTML": 0.897,
|
550 |
+
"Java": 0.651,
|
551 |
+
"PHP": 0.595
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.665,
|
555 |
+
"Python": 0.703,
|
556 |
+
"HTML": 0.899,
|
557 |
+
"Java": 0.711,
|
558 |
+
"PHP": 0.654
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9410000443458557,
|
564 |
+
"1": 0.9800000190734863,
|
565 |
+
"2": 0.9300000667572021,
|
566 |
+
"3": 0.9420000314712524
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.940000057220459,
|
570 |
+
"1": 0.9900000691413879,
|
571 |
+
"2": 0.9250000715255737,
|
572 |
+
"3": 0.956000030040741
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.801,
|
576 |
+
"1": 0.665,
|
577 |
+
"2": 0.663,
|
578 |
+
"3": 0.634
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.726,
|
582 |
+
"1": 0.804,
|
583 |
+
"2": 0.682,
|
584 |
+
"3": 0.817
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.822,
|
588 |
+
"1": 0.884,
|
589 |
+
"2": 0.759,
|
590 |
+
"3": 0.841
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.834,
|
594 |
+
"1": 0.965,
|
595 |
+
"2": 0.727,
|
596 |
+
"3": 0.673
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.835,
|
600 |
+
"1": 0.966,
|
601 |
+
"2": 0.766,
|
602 |
+
"3": 0.692
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.858,
|
606 |
+
"1": 0.961,
|
607 |
+
"2": 0.847,
|
608 |
+
"3": 0.817
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 1.0,
|
614 |
+
"fr": 0.9900000691413879,
|
615 |
+
"de": 0.9950000643730164,
|
616 |
+
"es": 0.9930000305175781,
|
617 |
+
"nl": 0.9960000514984131
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 0.999000072479248,
|
621 |
+
"fr": 0.999000072479248,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.738,
|
628 |
+
"fr": 0.6,
|
629 |
+
"de": 0.739,
|
630 |
+
"es": 0.913,
|
631 |
+
"nl": 0.659
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.831,
|
635 |
+
"fr": 0.605,
|
636 |
+
"de": 0.826,
|
637 |
+
"es": 0.969,
|
638 |
+
"nl": 0.759
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.897,
|
642 |
+
"fr": 0.921,
|
643 |
+
"de": 0.859,
|
644 |
+
"es": 0.979,
|
645 |
+
"nl": 0.858
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.854,
|
649 |
+
"fr": 0.851,
|
650 |
+
"de": 0.87,
|
651 |
+
"es": 0.907,
|
652 |
+
"nl": 0.861
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.994,
|
656 |
+
"fr": 0.846,
|
657 |
+
"de": 0.873,
|
658 |
+
"es": 0.922,
|
659 |
+
"nl": 0.841
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.998,
|
663 |
+
"fr": 0.982,
|
664 |
+
"de": 0.909,
|
665 |
+
"es": 0.93,
|
666 |
+
"nl": 0.993
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2-random-3_matryoshka_google_gemma-2-2b_random_matryoshka_batch_top_k_resid_post_layer_12_trainer_10_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 16,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "72d04c50-0f4f-41bc-984c-e9711caab90f",
|
30 |
+
"datetime_epoch_millis": 1737155609764,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9579312890768051,
|
34 |
+
"llm_top_1_test_accuracy": 0.6534875,
|
35 |
+
"llm_top_2_test_accuracy": 0.72183125,
|
36 |
+
"llm_top_5_test_accuracy": 0.77963125,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9543375477194787,
|
44 |
+
"sae_top_1_test_accuracy": 0.73074375,
|
45 |
+
"sae_top_2_test_accuracy": 0.80368125,
|
46 |
+
"sae_top_5_test_accuracy": 0.87136875,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.966800057888031,
|
57 |
+
"llm_top_1_test_accuracy": 0.64,
|
58 |
+
"llm_top_2_test_accuracy": 0.696,
|
59 |
+
"llm_top_5_test_accuracy": 0.7876,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9644000411033631,
|
65 |
+
"sae_top_1_test_accuracy": 0.7562,
|
66 |
+
"sae_top_2_test_accuracy": 0.8068,
|
67 |
+
"sae_top_5_test_accuracy": 0.8698,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9564000368118286,
|
76 |
+
"llm_top_1_test_accuracy": 0.673,
|
77 |
+
"llm_top_2_test_accuracy": 0.7138,
|
78 |
+
"llm_top_5_test_accuracy": 0.763,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9478000521659851,
|
84 |
+
"sae_top_1_test_accuracy": 0.6714,
|
85 |
+
"sae_top_2_test_accuracy": 0.7666000000000001,
|
86 |
+
"sae_top_5_test_accuracy": 0.8572,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9260000348091125,
|
95 |
+
"llm_top_1_test_accuracy": 0.6944,
|
96 |
+
"llm_top_2_test_accuracy": 0.7378,
|
97 |
+
"llm_top_5_test_accuracy": 0.7484,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9250000357627869,
|
103 |
+
"sae_top_1_test_accuracy": 0.72,
|
104 |
+
"sae_top_2_test_accuracy": 0.7902,
|
105 |
+
"sae_top_5_test_accuracy": 0.8106,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9154000520706177,
|
114 |
+
"llm_top_1_test_accuracy": 0.6096,
|
115 |
+
"llm_top_2_test_accuracy": 0.6462,
|
116 |
+
"llm_top_5_test_accuracy": 0.6826000000000001,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9124000549316407,
|
122 |
+
"sae_top_1_test_accuracy": 0.7162,
|
123 |
+
"sae_top_2_test_accuracy": 0.7698,
|
124 |
+
"sae_top_5_test_accuracy": 0.8141999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.673,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9695000648498535,
|
141 |
+
"sae_top_1_test_accuracy": 0.864,
|
142 |
+
"sae_top_2_test_accuracy": 0.863,
|
143 |
+
"sae_top_5_test_accuracy": 0.907,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9708000421524048,
|
152 |
+
"llm_top_1_test_accuracy": 0.6624000000000001,
|
153 |
+
"llm_top_2_test_accuracy": 0.6916,
|
154 |
+
"llm_top_5_test_accuracy": 0.7562,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9670000314712525,
|
160 |
+
"sae_top_1_test_accuracy": 0.6474,
|
161 |
+
"sae_top_2_test_accuracy": 0.7102,
|
162 |
+
"sae_top_5_test_accuracy": 0.8493999999999999,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9472500383853912,
|
171 |
+
"llm_top_1_test_accuracy": 0.6375000000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.77325,
|
173 |
+
"llm_top_5_test_accuracy": 0.82525,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9500000476837158,
|
179 |
+
"sae_top_1_test_accuracy": 0.76675,
|
180 |
+
"sae_top_2_test_accuracy": 0.8322499999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.8787499999999999,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9998000144958497,
|
190 |
+
"llm_top_1_test_accuracy": 0.638,
|
191 |
+
"llm_top_2_test_accuracy": 0.792,
|
192 |
+
"llm_top_5_test_accuracy": 0.908,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9986000537872315,
|
198 |
+
"sae_top_1_test_accuracy": 0.704,
|
199 |
+
"sae_top_2_test_accuracy": 0.8906000000000001,
|
200 |
+
"sae_top_5_test_accuracy": 0.984,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matryoshka_gemma-2-2b-16k-v2_matryoshka_google_gemma-2-2b_random_matryoshka_batch_top_k_resid_post_layer_12_trainer_10",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9430000185966492,
|
240 |
+
"1": 0.9650000333786011,
|
241 |
+
"2": 0.9510000348091125,
|
242 |
+
"6": 0.9860000610351562,
|
243 |
+
"9": 0.9770000576972961
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9530000686645508,
|
249 |
+
"6": 0.987000048160553,
|
250 |
+
"9": 0.9760000705718994
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.577,
|
254 |
+
"1": 0.614,
|
255 |
+
"2": 0.662,
|
256 |
+
"6": 0.786,
|
257 |
+
"9": 0.561
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.573,
|
261 |
+
"1": 0.662,
|
262 |
+
"2": 0.719,
|
263 |
+
"6": 0.811,
|
264 |
+
"9": 0.715
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.714,
|
268 |
+
"1": 0.712,
|
269 |
+
"2": 0.755,
|
270 |
+
"6": 0.895,
|
271 |
+
"9": 0.862
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.571,
|
275 |
+
"1": 0.632,
|
276 |
+
"2": 0.861,
|
277 |
+
"6": 0.792,
|
278 |
+
"9": 0.925
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.608,
|
282 |
+
"1": 0.644,
|
283 |
+
"2": 0.868,
|
284 |
+
"6": 0.981,
|
285 |
+
"9": 0.933
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.729,
|
289 |
+
"1": 0.828,
|
290 |
+
"2": 0.866,
|
291 |
+
"6": 0.984,
|
292 |
+
"9": 0.942
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9600000381469727,
|
298 |
+
"13": 0.9580000638961792,
|
299 |
+
"14": 0.9500000476837158,
|
300 |
+
"18": 0.9070000648498535,
|
301 |
+
"19": 0.9640000462532043
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9660000205039978,
|
305 |
+
"13": 0.9610000252723694,
|
306 |
+
"14": 0.9550000429153442,
|
307 |
+
"18": 0.9350000619888306,
|
308 |
+
"19": 0.9650000333786011
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.552,
|
312 |
+
"13": 0.677,
|
313 |
+
"14": 0.634,
|
314 |
+
"18": 0.697,
|
315 |
+
"19": 0.805
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.68,
|
319 |
+
"13": 0.703,
|
320 |
+
"14": 0.68,
|
321 |
+
"18": 0.728,
|
322 |
+
"19": 0.778
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.785,
|
326 |
+
"13": 0.747,
|
327 |
+
"14": 0.71,
|
328 |
+
"18": 0.737,
|
329 |
+
"19": 0.836
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.557,
|
333 |
+
"13": 0.667,
|
334 |
+
"14": 0.646,
|
335 |
+
"18": 0.695,
|
336 |
+
"19": 0.792
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.762,
|
340 |
+
"13": 0.665,
|
341 |
+
"14": 0.857,
|
342 |
+
"18": 0.721,
|
343 |
+
"19": 0.828
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.952,
|
347 |
+
"13": 0.863,
|
348 |
+
"14": 0.865,
|
349 |
+
"18": 0.762,
|
350 |
+
"19": 0.844
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9500000476837158,
|
356 |
+
"21": 0.9230000376701355,
|
357 |
+
"22": 0.9100000262260437,
|
358 |
+
"25": 0.9550000429153442,
|
359 |
+
"26": 0.8870000243186951
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9470000267028809,
|
363 |
+
"21": 0.9190000295639038,
|
364 |
+
"22": 0.9130000472068787,
|
365 |
+
"25": 0.9650000333786011,
|
366 |
+
"26": 0.8860000371932983
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.715,
|
370 |
+
"21": 0.774,
|
371 |
+
"22": 0.658,
|
372 |
+
"25": 0.695,
|
373 |
+
"26": 0.63
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.803,
|
377 |
+
"21": 0.754,
|
378 |
+
"22": 0.685,
|
379 |
+
"25": 0.76,
|
380 |
+
"26": 0.687
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.818,
|
384 |
+
"21": 0.796,
|
385 |
+
"22": 0.664,
|
386 |
+
"25": 0.788,
|
387 |
+
"26": 0.676
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.835,
|
391 |
+
"21": 0.688,
|
392 |
+
"22": 0.731,
|
393 |
+
"25": 0.701,
|
394 |
+
"26": 0.645
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.871,
|
398 |
+
"21": 0.72,
|
399 |
+
"22": 0.767,
|
400 |
+
"25": 0.862,
|
401 |
+
"26": 0.731
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.89,
|
405 |
+
"21": 0.736,
|
406 |
+
"22": 0.787,
|
407 |
+
"25": 0.886,
|
408 |
+
"26": 0.754
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9500000476837158,
|
414 |
+
"2": 0.9320000410079956,
|
415 |
+
"3": 0.9020000696182251,
|
416 |
+
"5": 0.9070000648498535,
|
417 |
+
"6": 0.8710000514984131
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9480000734329224,
|
421 |
+
"2": 0.9350000619888306,
|
422 |
+
"3": 0.9130000472068787,
|
423 |
+
"5": 0.921000063419342,
|
424 |
+
"6": 0.8600000143051147
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.708,
|
428 |
+
"2": 0.597,
|
429 |
+
"3": 0.586,
|
430 |
+
"5": 0.568,
|
431 |
+
"6": 0.589
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.738,
|
435 |
+
"2": 0.663,
|
436 |
+
"3": 0.599,
|
437 |
+
"5": 0.596,
|
438 |
+
"6": 0.635
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.785,
|
442 |
+
"2": 0.648,
|
443 |
+
"3": 0.642,
|
444 |
+
"5": 0.644,
|
445 |
+
"6": 0.694
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.656,
|
449 |
+
"2": 0.725,
|
450 |
+
"3": 0.605,
|
451 |
+
"5": 0.866,
|
452 |
+
"6": 0.729
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.815,
|
456 |
+
"2": 0.757,
|
457 |
+
"3": 0.682,
|
458 |
+
"5": 0.862,
|
459 |
+
"6": 0.733
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.897,
|
463 |
+
"2": 0.853,
|
464 |
+
"3": 0.684,
|
465 |
+
"5": 0.887,
|
466 |
+
"6": 0.75
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.971000075340271,
|
472 |
+
"5.0": 0.968000054359436
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9820000529289246,
|
476 |
+
"5.0": 0.9800000190734863
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.673,
|
480 |
+
"5.0": 0.673
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.864,
|
492 |
+
"5.0": 0.864
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.863,
|
496 |
+
"5.0": 0.863
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.907,
|
500 |
+
"5.0": 0.907
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9590000510215759,
|
506 |
+
"Python": 0.984000027179718,
|
507 |
+
"HTML": 0.984000027179718,
|
508 |
+
"Java": 0.956000030040741,
|
509 |
+
"PHP": 0.9520000219345093
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9630000591278076,
|
513 |
+
"Python": 0.984000027179718,
|
514 |
+
"HTML": 0.9830000400543213,
|
515 |
+
"Java": 0.9660000205039978,
|
516 |
+
"PHP": 0.9580000638961792
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.662,
|
520 |
+
"Python": 0.637,
|
521 |
+
"HTML": 0.793,
|
522 |
+
"Java": 0.629,
|
523 |
+
"PHP": 0.591
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.664,
|
527 |
+
"Python": 0.674,
|
528 |
+
"HTML": 0.799,
|
529 |
+
"Java": 0.685,
|
530 |
+
"PHP": 0.636
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.751,
|
534 |
+
"Python": 0.725,
|
535 |
+
"HTML": 0.902,
|
536 |
+
"Java": 0.731,
|
537 |
+
"PHP": 0.672
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.652,
|
541 |
+
"Python": 0.651,
|
542 |
+
"HTML": 0.694,
|
543 |
+
"Java": 0.649,
|
544 |
+
"PHP": 0.591
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.701,
|
548 |
+
"Python": 0.679,
|
549 |
+
"HTML": 0.928,
|
550 |
+
"Java": 0.646,
|
551 |
+
"PHP": 0.597
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.695,
|
555 |
+
"Python": 0.939,
|
556 |
+
"HTML": 0.948,
|
557 |
+
"Java": 0.733,
|
558 |
+
"PHP": 0.932
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9390000700950623,
|
564 |
+
"1": 0.984000027179718,
|
565 |
+
"2": 0.9290000200271606,
|
566 |
+
"3": 0.9480000734329224
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9340000152587891,
|
570 |
+
"1": 0.9890000224113464,
|
571 |
+
"2": 0.9220000505447388,
|
572 |
+
"3": 0.9440000653266907
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.561,
|
576 |
+
"1": 0.663,
|
577 |
+
"2": 0.671,
|
578 |
+
"3": 0.655
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.798,
|
582 |
+
"1": 0.804,
|
583 |
+
"2": 0.692,
|
584 |
+
"3": 0.799
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.822,
|
588 |
+
"1": 0.885,
|
589 |
+
"2": 0.757,
|
590 |
+
"3": 0.837
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.754,
|
594 |
+
"1": 0.952,
|
595 |
+
"2": 0.706,
|
596 |
+
"3": 0.655
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.857,
|
600 |
+
"1": 0.952,
|
601 |
+
"2": 0.829,
|
602 |
+
"3": 0.691
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.862,
|
606 |
+
"1": 0.957,
|
607 |
+
"2": 0.83,
|
608 |
+
"3": 0.866
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.9980000257492065,
|
614 |
+
"fr": 0.999000072479248,
|
615 |
+
"de": 0.999000072479248,
|
616 |
+
"es": 0.9980000257492065,
|
617 |
+
"nl": 0.999000072479248
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 0.999000072479248
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.741,
|
628 |
+
"fr": 0.579,
|
629 |
+
"de": 0.755,
|
630 |
+
"es": 0.481,
|
631 |
+
"nl": 0.634
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.838,
|
635 |
+
"fr": 0.58,
|
636 |
+
"de": 0.825,
|
637 |
+
"es": 0.956,
|
638 |
+
"nl": 0.761
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.897,
|
642 |
+
"fr": 0.917,
|
643 |
+
"de": 0.893,
|
644 |
+
"es": 0.978,
|
645 |
+
"nl": 0.855
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.747,
|
649 |
+
"fr": 0.602,
|
650 |
+
"de": 0.915,
|
651 |
+
"es": 0.603,
|
652 |
+
"nl": 0.653
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 1.0,
|
656 |
+
"fr": 0.981,
|
657 |
+
"de": 0.924,
|
658 |
+
"es": 0.864,
|
659 |
+
"nl": 0.684
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.999,
|
663 |
+
"fr": 0.99,
|
664 |
+
"de": 0.943,
|
665 |
+
"es": 0.99,
|
666 |
+
"nl": 0.998
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "267c424b-c891-4e69-ae62-7280dd6c4fff",
|
30 |
+
"datetime_epoch_millis": 1736906900052,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9517437667071819,
|
34 |
+
"llm_top_1_test_accuracy": 0.66830625,
|
35 |
+
"llm_top_2_test_accuracy": 0.7204250000000001,
|
36 |
+
"llm_top_5_test_accuracy": 0.7800625,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9565875533968211,
|
44 |
+
"sae_top_1_test_accuracy": 0.75943125,
|
45 |
+
"sae_top_2_test_accuracy": 0.79540625,
|
46 |
+
"sae_top_5_test_accuracy": 0.86763125,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9598000000000001,
|
57 |
+
"llm_top_1_test_accuracy": 0.6439999999999999,
|
58 |
+
"llm_top_2_test_accuracy": 0.6912,
|
59 |
+
"llm_top_5_test_accuracy": 0.7918000000000001,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.963800048828125,
|
65 |
+
"sae_top_1_test_accuracy": 0.77,
|
66 |
+
"sae_top_2_test_accuracy": 0.8089999999999999,
|
67 |
+
"sae_top_5_test_accuracy": 0.9046,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9477999999999998,
|
76 |
+
"llm_top_1_test_accuracy": 0.6696,
|
77 |
+
"llm_top_2_test_accuracy": 0.7174,
|
78 |
+
"llm_top_5_test_accuracy": 0.7707999999999999,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9520000576972961,
|
84 |
+
"sae_top_1_test_accuracy": 0.6894,
|
85 |
+
"sae_top_2_test_accuracy": 0.7662,
|
86 |
+
"sae_top_5_test_accuracy": 0.8196,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9168,
|
95 |
+
"llm_top_1_test_accuracy": 0.6839999999999999,
|
96 |
+
"llm_top_2_test_accuracy": 0.7392,
|
97 |
+
"llm_top_5_test_accuracy": 0.7489999999999999,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9254000663757325,
|
103 |
+
"sae_top_1_test_accuracy": 0.7936000000000001,
|
104 |
+
"sae_top_2_test_accuracy": 0.7964,
|
105 |
+
"sae_top_5_test_accuracy": 0.868,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.8932,
|
114 |
+
"llm_top_1_test_accuracy": 0.594,
|
115 |
+
"llm_top_2_test_accuracy": 0.6397999999999999,
|
116 |
+
"llm_top_5_test_accuracy": 0.6842,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9166000485420227,
|
122 |
+
"sae_top_1_test_accuracy": 0.7586,
|
123 |
+
"sae_top_2_test_accuracy": 0.7764,
|
124 |
+
"sae_top_5_test_accuracy": 0.8161999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9815,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9760000705718994,
|
141 |
+
"sae_top_1_test_accuracy": 0.904,
|
142 |
+
"sae_top_2_test_accuracy": 0.905,
|
143 |
+
"sae_top_5_test_accuracy": 0.906,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9690000534057617,
|
152 |
+
"llm_top_1_test_accuracy": 0.6642,
|
153 |
+
"llm_top_2_test_accuracy": 0.6954,
|
154 |
+
"llm_top_5_test_accuracy": 0.7666,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9710000395774842,
|
160 |
+
"sae_top_1_test_accuracy": 0.6354000000000001,
|
161 |
+
"sae_top_2_test_accuracy": 0.6686,
|
162 |
+
"sae_top_5_test_accuracy": 0.8156000000000001,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9462500512599945,
|
171 |
+
"llm_top_1_test_accuracy": 0.6892500000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.7729999999999999,
|
173 |
+
"llm_top_5_test_accuracy": 0.8185,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9485000520944595,
|
179 |
+
"sae_top_1_test_accuracy": 0.68525,
|
180 |
+
"sae_top_2_test_accuracy": 0.71325,
|
181 |
+
"sae_top_5_test_accuracy": 0.81625,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.7294,
|
191 |
+
"llm_top_2_test_accuracy": 0.7834000000000001,
|
192 |
+
"llm_top_5_test_accuracy": 0.8936,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9994000434875489,
|
198 |
+
"sae_top_1_test_accuracy": 0.8392,
|
199 |
+
"sae_top_2_test_accuracy": 0.9284000000000001,
|
200 |
+
"sae_top_5_test_accuracy": 0.9948,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2",
|
210 |
+
"sae_lens_version": "5.3.0",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": null
|
237 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "14c41a7d-c6d0-4f0d-b1c4-6b423ba7e5e7",
|
30 |
+
"datetime_epoch_millis": 1737158029483,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9586687814444304,
|
34 |
+
"llm_top_1_test_accuracy": 0.65339375,
|
35 |
+
"llm_top_2_test_accuracy": 0.7189875000000001,
|
36 |
+
"llm_top_5_test_accuracy": 0.7788625,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9532750476151705,
|
44 |
+
"sae_top_1_test_accuracy": 0.73613125,
|
45 |
+
"sae_top_2_test_accuracy": 0.82208125,
|
46 |
+
"sae_top_5_test_accuracy": 0.8863374999999999,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9612000584602356,
|
65 |
+
"sae_top_1_test_accuracy": 0.7634000000000001,
|
66 |
+
"sae_top_2_test_accuracy": 0.8208,
|
67 |
+
"sae_top_5_test_accuracy": 0.9061999999999999,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9530000329017639,
|
76 |
+
"llm_top_1_test_accuracy": 0.6716,
|
77 |
+
"llm_top_2_test_accuracy": 0.7292,
|
78 |
+
"llm_top_5_test_accuracy": 0.7602,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9436000466346741,
|
84 |
+
"sae_top_1_test_accuracy": 0.6818,
|
85 |
+
"sae_top_2_test_accuracy": 0.7838,
|
86 |
+
"sae_top_5_test_accuracy": 0.8421999999999998,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9266000390052795,
|
95 |
+
"llm_top_1_test_accuracy": 0.6862,
|
96 |
+
"llm_top_2_test_accuracy": 0.738,
|
97 |
+
"llm_top_5_test_accuracy": 0.7498,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9210000395774841,
|
103 |
+
"sae_top_1_test_accuracy": 0.7034,
|
104 |
+
"sae_top_2_test_accuracy": 0.8242,
|
105 |
+
"sae_top_5_test_accuracy": 0.8746,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9152000427246094,
|
114 |
+
"llm_top_1_test_accuracy": 0.6066,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466000000000001,
|
116 |
+
"llm_top_5_test_accuracy": 0.6782,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9148000478744507,
|
122 |
+
"sae_top_1_test_accuracy": 0.7053999999999999,
|
123 |
+
"sae_top_2_test_accuracy": 0.754,
|
124 |
+
"sae_top_5_test_accuracy": 0.8253999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9685000479221344,
|
141 |
+
"sae_top_1_test_accuracy": 0.756,
|
142 |
+
"sae_top_2_test_accuracy": 0.826,
|
143 |
+
"sae_top_5_test_accuracy": 0.917,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9704000353813171,
|
152 |
+
"llm_top_1_test_accuracy": 0.6472,
|
153 |
+
"llm_top_2_test_accuracy": 0.6946,
|
154 |
+
"llm_top_5_test_accuracy": 0.7550000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.967400050163269,
|
160 |
+
"sae_top_1_test_accuracy": 0.6288,
|
161 |
+
"sae_top_2_test_accuracy": 0.8034000000000001,
|
162 |
+
"sae_top_5_test_accuracy": 0.8657999999999999,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9537500292062759,
|
171 |
+
"llm_top_1_test_accuracy": 0.64475,
|
172 |
+
"llm_top_2_test_accuracy": 0.7494999999999999,
|
173 |
+
"llm_top_5_test_accuracy": 0.8285,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9515000432729721,
|
179 |
+
"sae_top_1_test_accuracy": 0.7512499999999999,
|
180 |
+
"sae_top_2_test_accuracy": 0.8362499999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.8674999999999999,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 1.0,
|
190 |
+
"llm_top_1_test_accuracy": 0.6552,
|
191 |
+
"llm_top_2_test_accuracy": 0.7826,
|
192 |
+
"llm_top_5_test_accuracy": 0.9024000000000001,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9982000470161438,
|
198 |
+
"sae_top_1_test_accuracy": 0.899,
|
199 |
+
"sae_top_2_test_accuracy": 0.9282,
|
200 |
+
"sae_top_5_test_accuracy": 0.992,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1",
|
210 |
+
"sae_lens_version": "5.3.0",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9440000653266907,
|
240 |
+
"1": 0.9580000638961792,
|
241 |
+
"2": 0.9450000524520874,
|
242 |
+
"6": 0.9850000739097595,
|
243 |
+
"9": 0.9740000367164612
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.568,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.791,
|
257 |
+
"9": 0.551
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.585,
|
261 |
+
"1": 0.666,
|
262 |
+
"2": 0.673,
|
263 |
+
"6": 0.801,
|
264 |
+
"9": 0.712
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.707,
|
269 |
+
"2": 0.764,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.864
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.556,
|
275 |
+
"1": 0.633,
|
276 |
+
"2": 0.883,
|
277 |
+
"6": 0.819,
|
278 |
+
"9": 0.926
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.605,
|
282 |
+
"1": 0.696,
|
283 |
+
"2": 0.889,
|
284 |
+
"6": 0.978,
|
285 |
+
"9": 0.936
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.881,
|
289 |
+
"1": 0.848,
|
290 |
+
"2": 0.892,
|
291 |
+
"6": 0.973,
|
292 |
+
"9": 0.937
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9580000638961792,
|
298 |
+
"13": 0.9470000267028809,
|
299 |
+
"14": 0.940000057220459,
|
300 |
+
"18": 0.921000063419342,
|
301 |
+
"19": 0.9520000219345093
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9700000286102295,
|
305 |
+
"13": 0.9470000267028809,
|
306 |
+
"14": 0.9550000429153442,
|
307 |
+
"18": 0.9270000457763672,
|
308 |
+
"19": 0.9660000205039978
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.57,
|
312 |
+
"13": 0.67,
|
313 |
+
"14": 0.639,
|
314 |
+
"18": 0.691,
|
315 |
+
"19": 0.788
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.751,
|
319 |
+
"13": 0.721,
|
320 |
+
"14": 0.684,
|
321 |
+
"18": 0.714,
|
322 |
+
"19": 0.776
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.772,
|
326 |
+
"13": 0.748,
|
327 |
+
"14": 0.727,
|
328 |
+
"18": 0.722,
|
329 |
+
"19": 0.832
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.57,
|
333 |
+
"13": 0.673,
|
334 |
+
"14": 0.65,
|
335 |
+
"18": 0.712,
|
336 |
+
"19": 0.804
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.771,
|
340 |
+
"13": 0.711,
|
341 |
+
"14": 0.86,
|
342 |
+
"18": 0.737,
|
343 |
+
"19": 0.84
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.843,
|
347 |
+
"13": 0.86,
|
348 |
+
"14": 0.88,
|
349 |
+
"18": 0.754,
|
350 |
+
"19": 0.874
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9540000557899475,
|
356 |
+
"21": 0.9190000295639038,
|
357 |
+
"22": 0.9010000228881836,
|
358 |
+
"25": 0.9580000638961792,
|
359 |
+
"26": 0.8730000257492065
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9520000219345093,
|
363 |
+
"21": 0.9240000247955322,
|
364 |
+
"22": 0.9190000295639038,
|
365 |
+
"25": 0.9580000638961792,
|
366 |
+
"26": 0.8800000548362732
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.709,
|
370 |
+
"21": 0.761,
|
371 |
+
"22": 0.641,
|
372 |
+
"25": 0.701,
|
373 |
+
"26": 0.619
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.801,
|
377 |
+
"21": 0.776,
|
378 |
+
"22": 0.678,
|
379 |
+
"25": 0.753,
|
380 |
+
"26": 0.682
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.812,
|
384 |
+
"21": 0.806,
|
385 |
+
"22": 0.687,
|
386 |
+
"25": 0.791,
|
387 |
+
"26": 0.653
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.902,
|
391 |
+
"21": 0.793,
|
392 |
+
"22": 0.481,
|
393 |
+
"25": 0.706,
|
394 |
+
"26": 0.635
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.908,
|
398 |
+
"21": 0.829,
|
399 |
+
"22": 0.814,
|
400 |
+
"25": 0.858,
|
401 |
+
"26": 0.712
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.928,
|
405 |
+
"21": 0.862,
|
406 |
+
"22": 0.878,
|
407 |
+
"25": 0.908,
|
408 |
+
"26": 0.797
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9460000395774841,
|
414 |
+
"2": 0.921000063419342,
|
415 |
+
"3": 0.9190000295639038,
|
416 |
+
"5": 0.9180000424385071,
|
417 |
+
"6": 0.8700000643730164
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9470000267028809,
|
421 |
+
"2": 0.9280000329017639,
|
422 |
+
"3": 0.9130000472068787,
|
423 |
+
"5": 0.9260000586509705,
|
424 |
+
"6": 0.862000048160553
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.662,
|
428 |
+
"2": 0.612,
|
429 |
+
"3": 0.601,
|
430 |
+
"5": 0.575,
|
431 |
+
"6": 0.583
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.749,
|
435 |
+
"2": 0.661,
|
436 |
+
"3": 0.62,
|
437 |
+
"5": 0.58,
|
438 |
+
"6": 0.623
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.758,
|
442 |
+
"2": 0.642,
|
443 |
+
"3": 0.628,
|
444 |
+
"5": 0.665,
|
445 |
+
"6": 0.698
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.887,
|
449 |
+
"2": 0.605,
|
450 |
+
"3": 0.618,
|
451 |
+
"5": 0.811,
|
452 |
+
"6": 0.606
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.9,
|
456 |
+
"2": 0.649,
|
457 |
+
"3": 0.621,
|
458 |
+
"5": 0.827,
|
459 |
+
"6": 0.773
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.914,
|
463 |
+
"2": 0.857,
|
464 |
+
"3": 0.743,
|
465 |
+
"5": 0.858,
|
466 |
+
"6": 0.755
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.968000054359436,
|
472 |
+
"5.0": 0.9690000414848328
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9800000190734863,
|
476 |
+
"5.0": 0.9820000529289246
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.672,
|
480 |
+
"5.0": 0.672
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.756,
|
492 |
+
"5.0": 0.756
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.826,
|
496 |
+
"5.0": 0.826
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.917,
|
500 |
+
"5.0": 0.917
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9600000381469727,
|
506 |
+
"Python": 0.9770000576972961,
|
507 |
+
"HTML": 0.987000048160553,
|
508 |
+
"Java": 0.9640000462532043,
|
509 |
+
"PHP": 0.9490000605583191
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9570000171661377,
|
513 |
+
"Python": 0.9890000224113464,
|
514 |
+
"HTML": 0.9910000562667847,
|
515 |
+
"Java": 0.9590000510215759,
|
516 |
+
"PHP": 0.956000030040741
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.67,
|
520 |
+
"Python": 0.636,
|
521 |
+
"HTML": 0.735,
|
522 |
+
"Java": 0.611,
|
523 |
+
"PHP": 0.584
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.661,
|
527 |
+
"Python": 0.678,
|
528 |
+
"HTML": 0.793,
|
529 |
+
"Java": 0.687,
|
530 |
+
"PHP": 0.654
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.743,
|
534 |
+
"Python": 0.718,
|
535 |
+
"HTML": 0.902,
|
536 |
+
"Java": 0.729,
|
537 |
+
"PHP": 0.683
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.605,
|
541 |
+
"Python": 0.638,
|
542 |
+
"HTML": 0.685,
|
543 |
+
"Java": 0.624,
|
544 |
+
"PHP": 0.592
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.641,
|
548 |
+
"Python": 0.913,
|
549 |
+
"HTML": 0.9,
|
550 |
+
"Java": 0.648,
|
551 |
+
"PHP": 0.915
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.847,
|
555 |
+
"Python": 0.927,
|
556 |
+
"HTML": 0.945,
|
557 |
+
"Java": 0.679,
|
558 |
+
"PHP": 0.931
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9460000395774841,
|
564 |
+
"1": 0.9770000576972961,
|
565 |
+
"2": 0.9330000281333923,
|
566 |
+
"3": 0.9500000476837158
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9430000185966492,
|
570 |
+
"1": 0.984000027179718,
|
571 |
+
"2": 0.937000036239624,
|
572 |
+
"3": 0.9510000348091125
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.574,
|
576 |
+
"1": 0.684,
|
577 |
+
"2": 0.673,
|
578 |
+
"3": 0.648
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.695,
|
582 |
+
"1": 0.798,
|
583 |
+
"2": 0.695,
|
584 |
+
"3": 0.81
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.82,
|
588 |
+
"1": 0.88,
|
589 |
+
"2": 0.761,
|
590 |
+
"3": 0.853
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.82,
|
594 |
+
"1": 0.706,
|
595 |
+
"2": 0.821,
|
596 |
+
"3": 0.658
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.836,
|
600 |
+
"1": 0.907,
|
601 |
+
"2": 0.847,
|
602 |
+
"3": 0.755
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.843,
|
606 |
+
"1": 0.96,
|
607 |
+
"2": 0.85,
|
608 |
+
"3": 0.817
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 1.0,
|
614 |
+
"fr": 0.999000072479248,
|
615 |
+
"de": 0.999000072479248,
|
616 |
+
"es": 0.9980000257492065,
|
617 |
+
"nl": 0.9950000643730164
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.746,
|
628 |
+
"fr": 0.605,
|
629 |
+
"de": 0.751,
|
630 |
+
"es": 0.518,
|
631 |
+
"nl": 0.656
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.818,
|
635 |
+
"fr": 0.597,
|
636 |
+
"de": 0.828,
|
637 |
+
"es": 0.911,
|
638 |
+
"nl": 0.759
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.887,
|
642 |
+
"fr": 0.912,
|
643 |
+
"de": 0.875,
|
644 |
+
"es": 0.983,
|
645 |
+
"nl": 0.855
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.998,
|
649 |
+
"fr": 0.994,
|
650 |
+
"de": 0.905,
|
651 |
+
"es": 0.869,
|
652 |
+
"nl": 0.729
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.999,
|
656 |
+
"fr": 0.99,
|
657 |
+
"de": 0.905,
|
658 |
+
"es": 0.991,
|
659 |
+
"nl": 0.756
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.997,
|
663 |
+
"fr": 0.994,
|
664 |
+
"de": 0.976,
|
665 |
+
"es": 0.993,
|
666 |
+
"nl": 1.0
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "9e0eae70-5492-4ca9-aa99-2257e3f140b5",
|
30 |
+
"datetime_epoch_millis": 1737158323660,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9586687814444304,
|
34 |
+
"llm_top_1_test_accuracy": 0.65339375,
|
35 |
+
"llm_top_2_test_accuracy": 0.7189875000000001,
|
36 |
+
"llm_top_5_test_accuracy": 0.7788625,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9564437951892614,
|
44 |
+
"sae_top_1_test_accuracy": 0.7706000000000001,
|
45 |
+
"sae_top_2_test_accuracy": 0.81841875,
|
46 |
+
"sae_top_5_test_accuracy": 0.8757687500000001,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9652000427246094,
|
65 |
+
"sae_top_1_test_accuracy": 0.7653999999999999,
|
66 |
+
"sae_top_2_test_accuracy": 0.8029999999999999,
|
67 |
+
"sae_top_5_test_accuracy": 0.89,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9530000329017639,
|
76 |
+
"llm_top_1_test_accuracy": 0.6716,
|
77 |
+
"llm_top_2_test_accuracy": 0.7292,
|
78 |
+
"llm_top_5_test_accuracy": 0.7602,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9508000493049622,
|
84 |
+
"sae_top_1_test_accuracy": 0.7292,
|
85 |
+
"sae_top_2_test_accuracy": 0.7874,
|
86 |
+
"sae_top_5_test_accuracy": 0.8263999999999999,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9266000390052795,
|
95 |
+
"llm_top_1_test_accuracy": 0.6862,
|
96 |
+
"llm_top_2_test_accuracy": 0.738,
|
97 |
+
"llm_top_5_test_accuracy": 0.7498,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9280000448226928,
|
103 |
+
"sae_top_1_test_accuracy": 0.7872000000000001,
|
104 |
+
"sae_top_2_test_accuracy": 0.8196,
|
105 |
+
"sae_top_5_test_accuracy": 0.8474,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9152000427246094,
|
114 |
+
"llm_top_1_test_accuracy": 0.6066,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466000000000001,
|
116 |
+
"llm_top_5_test_accuracy": 0.6782,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9162000417709351,
|
122 |
+
"sae_top_1_test_accuracy": 0.7445999999999999,
|
123 |
+
"sae_top_2_test_accuracy": 0.7756000000000001,
|
124 |
+
"sae_top_5_test_accuracy": 0.8150000000000001,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9705000519752502,
|
141 |
+
"sae_top_1_test_accuracy": 0.784,
|
142 |
+
"sae_top_2_test_accuracy": 0.84,
|
143 |
+
"sae_top_5_test_accuracy": 0.937,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9704000353813171,
|
152 |
+
"llm_top_1_test_accuracy": 0.6472,
|
153 |
+
"llm_top_2_test_accuracy": 0.6946,
|
154 |
+
"llm_top_5_test_accuracy": 0.7550000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.968600046634674,
|
160 |
+
"sae_top_1_test_accuracy": 0.6508,
|
161 |
+
"sae_top_2_test_accuracy": 0.7198,
|
162 |
+
"sae_top_5_test_accuracy": 0.8112,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9537500292062759,
|
171 |
+
"llm_top_1_test_accuracy": 0.64475,
|
172 |
+
"llm_top_2_test_accuracy": 0.7494999999999999,
|
173 |
+
"llm_top_5_test_accuracy": 0.8285,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9532500356435776,
|
179 |
+
"sae_top_1_test_accuracy": 0.8150000000000001,
|
180 |
+
"sae_top_2_test_accuracy": 0.8697499999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.8897499999999999,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 1.0,
|
190 |
+
"llm_top_1_test_accuracy": 0.6552,
|
191 |
+
"llm_top_2_test_accuracy": 0.7826,
|
192 |
+
"llm_top_5_test_accuracy": 0.9024000000000001,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9990000486373901,
|
198 |
+
"sae_top_1_test_accuracy": 0.8886,
|
199 |
+
"sae_top_2_test_accuracy": 0.9321999999999999,
|
200 |
+
"sae_top_5_test_accuracy": 0.9894000000000001,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_2",
|
210 |
+
"sae_lens_version": "5.3.0",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9460000395774841,
|
240 |
+
"1": 0.9610000252723694,
|
241 |
+
"2": 0.9550000429153442,
|
242 |
+
"6": 0.9860000610351562,
|
243 |
+
"9": 0.9780000448226929
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.568,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.791,
|
257 |
+
"9": 0.551
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.585,
|
261 |
+
"1": 0.666,
|
262 |
+
"2": 0.673,
|
263 |
+
"6": 0.801,
|
264 |
+
"9": 0.712
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.707,
|
269 |
+
"2": 0.764,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.864
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.594,
|
275 |
+
"1": 0.625,
|
276 |
+
"2": 0.852,
|
277 |
+
"6": 0.824,
|
278 |
+
"9": 0.932
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.608,
|
282 |
+
"1": 0.62,
|
283 |
+
"2": 0.868,
|
284 |
+
"6": 0.98,
|
285 |
+
"9": 0.939
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.795,
|
289 |
+
"1": 0.861,
|
290 |
+
"2": 0.891,
|
291 |
+
"6": 0.977,
|
292 |
+
"9": 0.926
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9630000591278076,
|
298 |
+
"13": 0.9540000557899475,
|
299 |
+
"14": 0.9550000429153442,
|
300 |
+
"18": 0.9190000295639038,
|
301 |
+
"19": 0.9630000591278076
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9700000286102295,
|
305 |
+
"13": 0.9470000267028809,
|
306 |
+
"14": 0.9550000429153442,
|
307 |
+
"18": 0.9270000457763672,
|
308 |
+
"19": 0.9660000205039978
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.57,
|
312 |
+
"13": 0.67,
|
313 |
+
"14": 0.639,
|
314 |
+
"18": 0.691,
|
315 |
+
"19": 0.788
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.751,
|
319 |
+
"13": 0.721,
|
320 |
+
"14": 0.684,
|
321 |
+
"18": 0.714,
|
322 |
+
"19": 0.776
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.772,
|
326 |
+
"13": 0.748,
|
327 |
+
"14": 0.727,
|
328 |
+
"18": 0.722,
|
329 |
+
"19": 0.832
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.856,
|
333 |
+
"13": 0.683,
|
334 |
+
"14": 0.64,
|
335 |
+
"18": 0.675,
|
336 |
+
"19": 0.792
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.856,
|
340 |
+
"13": 0.673,
|
341 |
+
"14": 0.859,
|
342 |
+
"18": 0.699,
|
343 |
+
"19": 0.85
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.858,
|
347 |
+
"13": 0.812,
|
348 |
+
"14": 0.861,
|
349 |
+
"18": 0.753,
|
350 |
+
"19": 0.848
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9540000557899475,
|
356 |
+
"21": 0.9190000295639038,
|
357 |
+
"22": 0.906000018119812,
|
358 |
+
"25": 0.9620000720024109,
|
359 |
+
"26": 0.8990000486373901
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9520000219345093,
|
363 |
+
"21": 0.9240000247955322,
|
364 |
+
"22": 0.9190000295639038,
|
365 |
+
"25": 0.9580000638961792,
|
366 |
+
"26": 0.8800000548362732
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.709,
|
370 |
+
"21": 0.761,
|
371 |
+
"22": 0.641,
|
372 |
+
"25": 0.701,
|
373 |
+
"26": 0.619
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.801,
|
377 |
+
"21": 0.776,
|
378 |
+
"22": 0.678,
|
379 |
+
"25": 0.753,
|
380 |
+
"26": 0.682
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.812,
|
384 |
+
"21": 0.806,
|
385 |
+
"22": 0.687,
|
386 |
+
"25": 0.791,
|
387 |
+
"26": 0.653
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.891,
|
391 |
+
"21": 0.798,
|
392 |
+
"22": 0.777,
|
393 |
+
"25": 0.874,
|
394 |
+
"26": 0.596
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.907,
|
398 |
+
"21": 0.821,
|
399 |
+
"22": 0.815,
|
400 |
+
"25": 0.862,
|
401 |
+
"26": 0.693
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.916,
|
405 |
+
"21": 0.844,
|
406 |
+
"22": 0.839,
|
407 |
+
"25": 0.88,
|
408 |
+
"26": 0.758
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9270000457763672,
|
414 |
+
"2": 0.9470000267028809,
|
415 |
+
"3": 0.9190000295639038,
|
416 |
+
"5": 0.9130000472068787,
|
417 |
+
"6": 0.8750000596046448
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9470000267028809,
|
421 |
+
"2": 0.9280000329017639,
|
422 |
+
"3": 0.9130000472068787,
|
423 |
+
"5": 0.9260000586509705,
|
424 |
+
"6": 0.862000048160553
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.662,
|
428 |
+
"2": 0.612,
|
429 |
+
"3": 0.601,
|
430 |
+
"5": 0.575,
|
431 |
+
"6": 0.583
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.749,
|
435 |
+
"2": 0.661,
|
436 |
+
"3": 0.62,
|
437 |
+
"5": 0.58,
|
438 |
+
"6": 0.623
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.758,
|
442 |
+
"2": 0.642,
|
443 |
+
"3": 0.628,
|
444 |
+
"5": 0.665,
|
445 |
+
"6": 0.698
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.813,
|
449 |
+
"2": 0.691,
|
450 |
+
"3": 0.695,
|
451 |
+
"5": 0.783,
|
452 |
+
"6": 0.741
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.876,
|
456 |
+
"2": 0.742,
|
457 |
+
"3": 0.701,
|
458 |
+
"5": 0.809,
|
459 |
+
"6": 0.75
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.917,
|
463 |
+
"2": 0.886,
|
464 |
+
"3": 0.7,
|
465 |
+
"5": 0.816,
|
466 |
+
"6": 0.756
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.971000075340271,
|
472 |
+
"5.0": 0.9700000286102295
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9800000190734863,
|
476 |
+
"5.0": 0.9820000529289246
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.672,
|
480 |
+
"5.0": 0.672
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.784,
|
492 |
+
"5.0": 0.784
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.84,
|
496 |
+
"5.0": 0.84
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.937,
|
500 |
+
"5.0": 0.937
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9510000348091125,
|
506 |
+
"Python": 0.987000048160553,
|
507 |
+
"HTML": 0.9900000691413879,
|
508 |
+
"Java": 0.9610000252723694,
|
509 |
+
"PHP": 0.9540000557899475
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9570000171661377,
|
513 |
+
"Python": 0.9890000224113464,
|
514 |
+
"HTML": 0.9910000562667847,
|
515 |
+
"Java": 0.9590000510215759,
|
516 |
+
"PHP": 0.956000030040741
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.67,
|
520 |
+
"Python": 0.636,
|
521 |
+
"HTML": 0.735,
|
522 |
+
"Java": 0.611,
|
523 |
+
"PHP": 0.584
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.661,
|
527 |
+
"Python": 0.678,
|
528 |
+
"HTML": 0.793,
|
529 |
+
"Java": 0.687,
|
530 |
+
"PHP": 0.654
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.743,
|
534 |
+
"Python": 0.718,
|
535 |
+
"HTML": 0.902,
|
536 |
+
"Java": 0.729,
|
537 |
+
"PHP": 0.683
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.628,
|
541 |
+
"Python": 0.645,
|
542 |
+
"HTML": 0.702,
|
543 |
+
"Java": 0.645,
|
544 |
+
"PHP": 0.634
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.65,
|
548 |
+
"Python": 0.635,
|
549 |
+
"HTML": 0.767,
|
550 |
+
"Java": 0.655,
|
551 |
+
"PHP": 0.892
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.83,
|
555 |
+
"Python": 0.688,
|
556 |
+
"HTML": 0.945,
|
557 |
+
"Java": 0.68,
|
558 |
+
"PHP": 0.913
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9410000443458557,
|
564 |
+
"1": 0.9880000352859497,
|
565 |
+
"2": 0.9340000152587891,
|
566 |
+
"3": 0.9500000476837158
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9430000185966492,
|
570 |
+
"1": 0.984000027179718,
|
571 |
+
"2": 0.937000036239624,
|
572 |
+
"3": 0.9510000348091125
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.574,
|
576 |
+
"1": 0.684,
|
577 |
+
"2": 0.673,
|
578 |
+
"3": 0.648
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.695,
|
582 |
+
"1": 0.798,
|
583 |
+
"2": 0.695,
|
584 |
+
"3": 0.81
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.82,
|
588 |
+
"1": 0.88,
|
589 |
+
"2": 0.761,
|
590 |
+
"3": 0.853
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.845,
|
594 |
+
"1": 0.953,
|
595 |
+
"2": 0.85,
|
596 |
+
"3": 0.612
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.853,
|
600 |
+
"1": 0.942,
|
601 |
+
"2": 0.86,
|
602 |
+
"3": 0.824
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.88,
|
606 |
+
"1": 0.973,
|
607 |
+
"2": 0.859,
|
608 |
+
"3": 0.847
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.999000072479248,
|
614 |
+
"fr": 1.0,
|
615 |
+
"de": 0.9980000257492065,
|
616 |
+
"es": 0.999000072479248,
|
617 |
+
"nl": 0.999000072479248
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.746,
|
628 |
+
"fr": 0.605,
|
629 |
+
"de": 0.751,
|
630 |
+
"es": 0.518,
|
631 |
+
"nl": 0.656
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.818,
|
635 |
+
"fr": 0.597,
|
636 |
+
"de": 0.828,
|
637 |
+
"es": 0.911,
|
638 |
+
"nl": 0.759
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.887,
|
642 |
+
"fr": 0.912,
|
643 |
+
"de": 0.875,
|
644 |
+
"es": 0.983,
|
645 |
+
"nl": 0.855
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.999,
|
649 |
+
"fr": 0.994,
|
650 |
+
"de": 0.899,
|
651 |
+
"es": 0.878,
|
652 |
+
"nl": 0.673
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.998,
|
656 |
+
"fr": 0.995,
|
657 |
+
"de": 0.912,
|
658 |
+
"es": 0.994,
|
659 |
+
"nl": 0.762
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.999,
|
663 |
+
"fr": 0.996,
|
664 |
+
"de": 0.958,
|
665 |
+
"es": 0.996,
|
666 |
+
"nl": 0.998
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "bcbf147a-e87a-4732-93ea-18267a09915c",
|
30 |
+
"datetime_epoch_millis": 1737158763061,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9586687814444304,
|
34 |
+
"llm_top_1_test_accuracy": 0.65339375,
|
35 |
+
"llm_top_2_test_accuracy": 0.7189875000000001,
|
36 |
+
"llm_top_5_test_accuracy": 0.7788625,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9570750448852777,
|
44 |
+
"sae_top_1_test_accuracy": 0.7721,
|
45 |
+
"sae_top_2_test_accuracy": 0.812975,
|
46 |
+
"sae_top_5_test_accuracy": 0.86725,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9648000478744507,
|
65 |
+
"sae_top_1_test_accuracy": 0.7902,
|
66 |
+
"sae_top_2_test_accuracy": 0.8588000000000001,
|
67 |
+
"sae_top_5_test_accuracy": 0.8735999999999999,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9530000329017639,
|
76 |
+
"llm_top_1_test_accuracy": 0.6716,
|
77 |
+
"llm_top_2_test_accuracy": 0.7292,
|
78 |
+
"llm_top_5_test_accuracy": 0.7602,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9530000448226928,
|
84 |
+
"sae_top_1_test_accuracy": 0.7408,
|
85 |
+
"sae_top_2_test_accuracy": 0.7636000000000001,
|
86 |
+
"sae_top_5_test_accuracy": 0.8230000000000001,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9266000390052795,
|
95 |
+
"llm_top_1_test_accuracy": 0.6862,
|
96 |
+
"llm_top_2_test_accuracy": 0.738,
|
97 |
+
"llm_top_5_test_accuracy": 0.7498,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9304000496864319,
|
103 |
+
"sae_top_1_test_accuracy": 0.7734,
|
104 |
+
"sae_top_2_test_accuracy": 0.794,
|
105 |
+
"sae_top_5_test_accuracy": 0.8246,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9152000427246094,
|
114 |
+
"llm_top_1_test_accuracy": 0.6066,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466000000000001,
|
116 |
+
"llm_top_5_test_accuracy": 0.6782,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9112000584602356,
|
122 |
+
"sae_top_1_test_accuracy": 0.7338000000000001,
|
123 |
+
"sae_top_2_test_accuracy": 0.818,
|
124 |
+
"sae_top_5_test_accuracy": 0.8213999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9790000319480896,
|
141 |
+
"sae_top_1_test_accuracy": 0.679,
|
142 |
+
"sae_top_2_test_accuracy": 0.741,
|
143 |
+
"sae_top_5_test_accuracy": 0.922,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9704000353813171,
|
152 |
+
"llm_top_1_test_accuracy": 0.6472,
|
153 |
+
"llm_top_2_test_accuracy": 0.6946,
|
154 |
+
"llm_top_5_test_accuracy": 0.7550000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9672000408172607,
|
160 |
+
"sae_top_1_test_accuracy": 0.6896,
|
161 |
+
"sae_top_2_test_accuracy": 0.7412,
|
162 |
+
"sae_top_5_test_accuracy": 0.8218,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9537500292062759,
|
171 |
+
"llm_top_1_test_accuracy": 0.64475,
|
172 |
+
"llm_top_2_test_accuracy": 0.7494999999999999,
|
173 |
+
"llm_top_5_test_accuracy": 0.8285,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9520000368356705,
|
179 |
+
"sae_top_1_test_accuracy": 0.869,
|
180 |
+
"sae_top_2_test_accuracy": 0.8839999999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.892,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 1.0,
|
190 |
+
"llm_top_1_test_accuracy": 0.6552,
|
191 |
+
"llm_top_2_test_accuracy": 0.7826,
|
192 |
+
"llm_top_5_test_accuracy": 0.9024000000000001,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9990000486373901,
|
198 |
+
"sae_top_1_test_accuracy": 0.901,
|
199 |
+
"sae_top_2_test_accuracy": 0.9032,
|
200 |
+
"sae_top_5_test_accuracy": 0.9596,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4",
|
210 |
+
"sae_lens_version": "5.3.0",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9460000395774841,
|
240 |
+
"1": 0.968000054359436,
|
241 |
+
"2": 0.9510000348091125,
|
242 |
+
"6": 0.987000048160553,
|
243 |
+
"9": 0.9720000624656677
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.568,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.791,
|
257 |
+
"9": 0.551
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.585,
|
261 |
+
"1": 0.666,
|
262 |
+
"2": 0.673,
|
263 |
+
"6": 0.801,
|
264 |
+
"9": 0.712
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.707,
|
269 |
+
"2": 0.764,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.864
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.683,
|
275 |
+
"1": 0.633,
|
276 |
+
"2": 0.866,
|
277 |
+
"6": 0.821,
|
278 |
+
"9": 0.948
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.817,
|
282 |
+
"1": 0.684,
|
283 |
+
"2": 0.87,
|
284 |
+
"6": 0.978,
|
285 |
+
"9": 0.945
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.85,
|
289 |
+
"1": 0.732,
|
290 |
+
"2": 0.87,
|
291 |
+
"6": 0.973,
|
292 |
+
"9": 0.943
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9660000205039978,
|
298 |
+
"13": 0.9540000557899475,
|
299 |
+
"14": 0.956000030040741,
|
300 |
+
"18": 0.9310000538825989,
|
301 |
+
"19": 0.9580000638961792
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9700000286102295,
|
305 |
+
"13": 0.9470000267028809,
|
306 |
+
"14": 0.9550000429153442,
|
307 |
+
"18": 0.9270000457763672,
|
308 |
+
"19": 0.9660000205039978
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.57,
|
312 |
+
"13": 0.67,
|
313 |
+
"14": 0.639,
|
314 |
+
"18": 0.691,
|
315 |
+
"19": 0.788
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.751,
|
319 |
+
"13": 0.721,
|
320 |
+
"14": 0.684,
|
321 |
+
"18": 0.714,
|
322 |
+
"19": 0.776
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.772,
|
326 |
+
"13": 0.748,
|
327 |
+
"14": 0.727,
|
328 |
+
"18": 0.722,
|
329 |
+
"19": 0.832
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.869,
|
333 |
+
"13": 0.662,
|
334 |
+
"14": 0.641,
|
335 |
+
"18": 0.681,
|
336 |
+
"19": 0.851
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.86,
|
340 |
+
"13": 0.709,
|
341 |
+
"14": 0.668,
|
342 |
+
"18": 0.74,
|
343 |
+
"19": 0.841
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.886,
|
347 |
+
"13": 0.747,
|
348 |
+
"14": 0.85,
|
349 |
+
"18": 0.785,
|
350 |
+
"19": 0.847
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9640000462532043,
|
356 |
+
"21": 0.921000063419342,
|
357 |
+
"22": 0.921000063419342,
|
358 |
+
"25": 0.9610000252723694,
|
359 |
+
"26": 0.8850000500679016
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9520000219345093,
|
363 |
+
"21": 0.9240000247955322,
|
364 |
+
"22": 0.9190000295639038,
|
365 |
+
"25": 0.9580000638961792,
|
366 |
+
"26": 0.8800000548362732
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.709,
|
370 |
+
"21": 0.761,
|
371 |
+
"22": 0.641,
|
372 |
+
"25": 0.701,
|
373 |
+
"26": 0.619
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.801,
|
377 |
+
"21": 0.776,
|
378 |
+
"22": 0.678,
|
379 |
+
"25": 0.753,
|
380 |
+
"26": 0.682
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.812,
|
384 |
+
"21": 0.806,
|
385 |
+
"22": 0.687,
|
386 |
+
"25": 0.791,
|
387 |
+
"26": 0.653
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.849,
|
391 |
+
"21": 0.803,
|
392 |
+
"22": 0.653,
|
393 |
+
"25": 0.888,
|
394 |
+
"26": 0.674
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.823,
|
398 |
+
"21": 0.807,
|
399 |
+
"22": 0.798,
|
400 |
+
"25": 0.882,
|
401 |
+
"26": 0.66
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.879,
|
405 |
+
"21": 0.837,
|
406 |
+
"22": 0.806,
|
407 |
+
"25": 0.895,
|
408 |
+
"26": 0.706
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9530000686645508,
|
414 |
+
"2": 0.9220000505447388,
|
415 |
+
"3": 0.9080000519752502,
|
416 |
+
"5": 0.9170000553131104,
|
417 |
+
"6": 0.8560000658035278
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9470000267028809,
|
421 |
+
"2": 0.9280000329017639,
|
422 |
+
"3": 0.9130000472068787,
|
423 |
+
"5": 0.9260000586509705,
|
424 |
+
"6": 0.862000048160553
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.662,
|
428 |
+
"2": 0.612,
|
429 |
+
"3": 0.601,
|
430 |
+
"5": 0.575,
|
431 |
+
"6": 0.583
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.749,
|
435 |
+
"2": 0.661,
|
436 |
+
"3": 0.62,
|
437 |
+
"5": 0.58,
|
438 |
+
"6": 0.623
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.758,
|
442 |
+
"2": 0.642,
|
443 |
+
"3": 0.628,
|
444 |
+
"5": 0.665,
|
445 |
+
"6": 0.698
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.854,
|
449 |
+
"2": 0.637,
|
450 |
+
"3": 0.589,
|
451 |
+
"5": 0.849,
|
452 |
+
"6": 0.74
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.915,
|
456 |
+
"2": 0.857,
|
457 |
+
"3": 0.715,
|
458 |
+
"5": 0.853,
|
459 |
+
"6": 0.75
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.919,
|
463 |
+
"2": 0.861,
|
464 |
+
"3": 0.738,
|
465 |
+
"5": 0.844,
|
466 |
+
"6": 0.745
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9790000319480896,
|
472 |
+
"5.0": 0.9790000319480896
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9800000190734863,
|
476 |
+
"5.0": 0.9820000529289246
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.672,
|
480 |
+
"5.0": 0.672
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.679,
|
492 |
+
"5.0": 0.679
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.741,
|
496 |
+
"5.0": 0.741
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.922,
|
500 |
+
"5.0": 0.922
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9520000219345093,
|
506 |
+
"Python": 0.984000027179718,
|
507 |
+
"HTML": 0.9850000739097595,
|
508 |
+
"Java": 0.9610000252723694,
|
509 |
+
"PHP": 0.9540000557899475
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9570000171661377,
|
513 |
+
"Python": 0.9890000224113464,
|
514 |
+
"HTML": 0.9910000562667847,
|
515 |
+
"Java": 0.9590000510215759,
|
516 |
+
"PHP": 0.956000030040741
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.67,
|
520 |
+
"Python": 0.636,
|
521 |
+
"HTML": 0.735,
|
522 |
+
"Java": 0.611,
|
523 |
+
"PHP": 0.584
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.661,
|
527 |
+
"Python": 0.678,
|
528 |
+
"HTML": 0.793,
|
529 |
+
"Java": 0.687,
|
530 |
+
"PHP": 0.654
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.743,
|
534 |
+
"Python": 0.718,
|
535 |
+
"HTML": 0.902,
|
536 |
+
"Java": 0.729,
|
537 |
+
"PHP": 0.683
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.65,
|
541 |
+
"Python": 0.597,
|
542 |
+
"HTML": 0.777,
|
543 |
+
"Java": 0.569,
|
544 |
+
"PHP": 0.855
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.859,
|
548 |
+
"Python": 0.635,
|
549 |
+
"HTML": 0.76,
|
550 |
+
"Java": 0.605,
|
551 |
+
"PHP": 0.847
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.88,
|
555 |
+
"Python": 0.71,
|
556 |
+
"HTML": 0.943,
|
557 |
+
"Java": 0.69,
|
558 |
+
"PHP": 0.886
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9380000233650208,
|
564 |
+
"1": 0.987000048160553,
|
565 |
+
"2": 0.9320000410079956,
|
566 |
+
"3": 0.9510000348091125
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9430000185966492,
|
570 |
+
"1": 0.984000027179718,
|
571 |
+
"2": 0.937000036239624,
|
572 |
+
"3": 0.9510000348091125
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.574,
|
576 |
+
"1": 0.684,
|
577 |
+
"2": 0.673,
|
578 |
+
"3": 0.648
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.695,
|
582 |
+
"1": 0.798,
|
583 |
+
"2": 0.695,
|
584 |
+
"3": 0.81
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.82,
|
588 |
+
"1": 0.88,
|
589 |
+
"2": 0.761,
|
590 |
+
"3": 0.853
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.805,
|
594 |
+
"1": 0.976,
|
595 |
+
"2": 0.872,
|
596 |
+
"3": 0.823
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.841,
|
600 |
+
"1": 0.975,
|
601 |
+
"2": 0.88,
|
602 |
+
"3": 0.84
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.872,
|
606 |
+
"1": 0.977,
|
607 |
+
"2": 0.872,
|
608 |
+
"3": 0.847
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.999000072479248,
|
614 |
+
"fr": 0.999000072479248,
|
615 |
+
"de": 0.999000072479248,
|
616 |
+
"es": 0.9980000257492065,
|
617 |
+
"nl": 1.0
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.746,
|
628 |
+
"fr": 0.605,
|
629 |
+
"de": 0.751,
|
630 |
+
"es": 0.518,
|
631 |
+
"nl": 0.656
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.818,
|
635 |
+
"fr": 0.597,
|
636 |
+
"de": 0.828,
|
637 |
+
"es": 0.911,
|
638 |
+
"nl": 0.759
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.887,
|
642 |
+
"fr": 0.912,
|
643 |
+
"de": 0.875,
|
644 |
+
"es": 0.983,
|
645 |
+
"nl": 0.855
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 1.0,
|
649 |
+
"fr": 0.992,
|
650 |
+
"de": 0.895,
|
651 |
+
"es": 0.919,
|
652 |
+
"nl": 0.699
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 1.0,
|
656 |
+
"fr": 0.994,
|
657 |
+
"de": 0.892,
|
658 |
+
"es": 0.933,
|
659 |
+
"nl": 0.697
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.999,
|
663 |
+
"fr": 0.99,
|
664 |
+
"de": 0.943,
|
665 |
+
"es": 0.975,
|
666 |
+
"nl": 0.891
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "8c7b4adf-17cb-48eb-89ab-3ca4c6f77e88",
|
30 |
+
"datetime_epoch_millis": 1737414571708,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9594125431030989,
|
34 |
+
"llm_top_1_test_accuracy": 0.6620500000000001,
|
35 |
+
"llm_top_2_test_accuracy": 0.7194124999999999,
|
36 |
+
"llm_top_5_test_accuracy": 0.78095,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9542500443756581,
|
44 |
+
"sae_top_1_test_accuracy": 0.76284375,
|
45 |
+
"sae_top_2_test_accuracy": 0.80590625,
|
46 |
+
"sae_top_5_test_accuracy": 0.8690999999999999,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9614000439643859,
|
65 |
+
"sae_top_1_test_accuracy": 0.7912,
|
66 |
+
"sae_top_2_test_accuracy": 0.8108000000000001,
|
67 |
+
"sae_top_5_test_accuracy": 0.8628,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9550000548362731,
|
76 |
+
"llm_top_1_test_accuracy": 0.6718000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7086,
|
78 |
+
"llm_top_5_test_accuracy": 0.7617999999999999,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9432000398635865,
|
84 |
+
"sae_top_1_test_accuracy": 0.7878,
|
85 |
+
"sae_top_2_test_accuracy": 0.7876,
|
86 |
+
"sae_top_5_test_accuracy": 0.8308,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9320000410079956,
|
95 |
+
"llm_top_1_test_accuracy": 0.6904,
|
96 |
+
"llm_top_2_test_accuracy": 0.7382000000000001,
|
97 |
+
"llm_top_5_test_accuracy": 0.7714000000000001,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9252000451087952,
|
103 |
+
"sae_top_1_test_accuracy": 0.7928,
|
104 |
+
"sae_top_2_test_accuracy": 0.8089999999999999,
|
105 |
+
"sae_top_5_test_accuracy": 0.8588000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9176000475883483,
|
114 |
+
"llm_top_1_test_accuracy": 0.6004,
|
115 |
+
"llm_top_2_test_accuracy": 0.6458,
|
116 |
+
"llm_top_5_test_accuracy": 0.6648,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9168000459671021,
|
122 |
+
"sae_top_1_test_accuracy": 0.6296,
|
123 |
+
"sae_top_2_test_accuracy": 0.7504,
|
124 |
+
"sae_top_5_test_accuracy": 0.8099999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9815000593662262,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9720000624656677,
|
141 |
+
"sae_top_1_test_accuracy": 0.77,
|
142 |
+
"sae_top_2_test_accuracy": 0.834,
|
143 |
+
"sae_top_5_test_accuracy": 0.919,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9690000414848328,
|
152 |
+
"llm_top_1_test_accuracy": 0.6622,
|
153 |
+
"llm_top_2_test_accuracy": 0.6940000000000001,
|
154 |
+
"llm_top_5_test_accuracy": 0.7559999999999999,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9692000389099121,
|
160 |
+
"sae_top_1_test_accuracy": 0.6336,
|
161 |
+
"sae_top_2_test_accuracy": 0.7116,
|
162 |
+
"sae_top_5_test_accuracy": 0.8055999999999999,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9510000497102737,
|
171 |
+
"llm_top_1_test_accuracy": 0.714,
|
172 |
+
"llm_top_2_test_accuracy": 0.7635,
|
173 |
+
"llm_top_5_test_accuracy": 0.828,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9480000436306,
|
179 |
+
"sae_top_1_test_accuracy": 0.7827500000000001,
|
180 |
+
"sae_top_2_test_accuracy": 0.81125,
|
181 |
+
"sae_top_5_test_accuracy": 0.882,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9998000144958497,
|
190 |
+
"llm_top_1_test_accuracy": 0.642,
|
191 |
+
"llm_top_2_test_accuracy": 0.7938,
|
192 |
+
"llm_top_5_test_accuracy": 0.9088,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9982000350952148,
|
198 |
+
"sae_top_1_test_accuracy": 0.915,
|
199 |
+
"sae_top_2_test_accuracy": 0.9326000000000001,
|
200 |
+
"sae_top_5_test_accuracy": 0.9837999999999999,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1",
|
210 |
+
"sae_lens_version": "5.3.2",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 65536,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9430000185966492,
|
240 |
+
"1": 0.9590000510215759,
|
241 |
+
"2": 0.9470000267028809,
|
242 |
+
"6": 0.9850000739097595,
|
243 |
+
"9": 0.9730000495910645
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.568,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.791,
|
257 |
+
"9": 0.551
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.585,
|
261 |
+
"1": 0.666,
|
262 |
+
"2": 0.673,
|
263 |
+
"6": 0.801,
|
264 |
+
"9": 0.712
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.707,
|
269 |
+
"2": 0.764,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.864
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.588,
|
275 |
+
"1": 0.624,
|
276 |
+
"2": 0.868,
|
277 |
+
"6": 0.977,
|
278 |
+
"9": 0.899
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.621,
|
282 |
+
"1": 0.627,
|
283 |
+
"2": 0.898,
|
284 |
+
"6": 0.975,
|
285 |
+
"9": 0.933
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.743,
|
289 |
+
"1": 0.74,
|
290 |
+
"2": 0.911,
|
291 |
+
"6": 0.978,
|
292 |
+
"9": 0.942
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9550000429153442,
|
298 |
+
"13": 0.9450000524520874,
|
299 |
+
"14": 0.9410000443458557,
|
300 |
+
"18": 0.9200000166893005,
|
301 |
+
"19": 0.9550000429153442
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9580000638961792,
|
305 |
+
"13": 0.9590000510215759,
|
306 |
+
"14": 0.9600000381469727,
|
307 |
+
"18": 0.9440000653266907,
|
308 |
+
"19": 0.9540000557899475
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.564,
|
312 |
+
"13": 0.669,
|
313 |
+
"14": 0.644,
|
314 |
+
"18": 0.701,
|
315 |
+
"19": 0.781
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.689,
|
319 |
+
"13": 0.709,
|
320 |
+
"14": 0.667,
|
321 |
+
"18": 0.71,
|
322 |
+
"19": 0.768
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.792,
|
326 |
+
"13": 0.74,
|
327 |
+
"14": 0.723,
|
328 |
+
"18": 0.727,
|
329 |
+
"19": 0.827
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.851,
|
333 |
+
"13": 0.678,
|
334 |
+
"14": 0.875,
|
335 |
+
"18": 0.704,
|
336 |
+
"19": 0.831
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.856,
|
340 |
+
"13": 0.678,
|
341 |
+
"14": 0.863,
|
342 |
+
"18": 0.704,
|
343 |
+
"19": 0.837
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.933,
|
347 |
+
"13": 0.756,
|
348 |
+
"14": 0.882,
|
349 |
+
"18": 0.741,
|
350 |
+
"19": 0.842
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9500000476837158,
|
356 |
+
"21": 0.9170000553131104,
|
357 |
+
"22": 0.9040000438690186,
|
358 |
+
"25": 0.956000030040741,
|
359 |
+
"26": 0.8990000486373901
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9620000720024109,
|
363 |
+
"21": 0.9190000295639038,
|
364 |
+
"22": 0.9150000214576721,
|
365 |
+
"25": 0.9600000381469727,
|
366 |
+
"26": 0.9040000438690186
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.715,
|
370 |
+
"21": 0.761,
|
371 |
+
"22": 0.638,
|
372 |
+
"25": 0.698,
|
373 |
+
"26": 0.64
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.81,
|
377 |
+
"21": 0.776,
|
378 |
+
"22": 0.679,
|
379 |
+
"25": 0.754,
|
380 |
+
"26": 0.672
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.878,
|
384 |
+
"21": 0.803,
|
385 |
+
"22": 0.686,
|
386 |
+
"25": 0.798,
|
387 |
+
"26": 0.692
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.817,
|
391 |
+
"21": 0.787,
|
392 |
+
"22": 0.869,
|
393 |
+
"25": 0.874,
|
394 |
+
"26": 0.617
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.865,
|
398 |
+
"21": 0.821,
|
399 |
+
"22": 0.896,
|
400 |
+
"25": 0.848,
|
401 |
+
"26": 0.615
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.929,
|
405 |
+
"21": 0.84,
|
406 |
+
"22": 0.889,
|
407 |
+
"25": 0.88,
|
408 |
+
"26": 0.756
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9540000557899475,
|
414 |
+
"2": 0.9270000457763672,
|
415 |
+
"3": 0.9150000214576721,
|
416 |
+
"5": 0.9110000729560852,
|
417 |
+
"6": 0.8770000338554382
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9470000267028809,
|
421 |
+
"2": 0.9300000667572021,
|
422 |
+
"3": 0.9250000715255737,
|
423 |
+
"5": 0.9200000166893005,
|
424 |
+
"6": 0.8660000562667847
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.658,
|
428 |
+
"2": 0.579,
|
429 |
+
"3": 0.609,
|
430 |
+
"5": 0.568,
|
431 |
+
"6": 0.588
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.754,
|
435 |
+
"2": 0.64,
|
436 |
+
"3": 0.605,
|
437 |
+
"5": 0.609,
|
438 |
+
"6": 0.621
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.765,
|
442 |
+
"2": 0.64,
|
443 |
+
"3": 0.614,
|
444 |
+
"5": 0.638,
|
445 |
+
"6": 0.667
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.629,
|
449 |
+
"2": 0.596,
|
450 |
+
"3": 0.54,
|
451 |
+
"5": 0.799,
|
452 |
+
"6": 0.584
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.813,
|
456 |
+
"2": 0.635,
|
457 |
+
"3": 0.68,
|
458 |
+
"5": 0.884,
|
459 |
+
"6": 0.74
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.91,
|
463 |
+
"2": 0.738,
|
464 |
+
"3": 0.755,
|
465 |
+
"5": 0.887,
|
466 |
+
"6": 0.76
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.971000075340271,
|
472 |
+
"5.0": 0.9730000495910645
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9820000529289246,
|
476 |
+
"5.0": 0.9810000658035278
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.672,
|
480 |
+
"5.0": 0.672
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.77,
|
492 |
+
"5.0": 0.77
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.834,
|
496 |
+
"5.0": 0.834
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.919,
|
500 |
+
"5.0": 0.919
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9570000171661377,
|
506 |
+
"Python": 0.9750000238418579,
|
507 |
+
"HTML": 0.9910000562667847,
|
508 |
+
"Java": 0.968000054359436,
|
509 |
+
"PHP": 0.9550000429153442
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.956000030040741,
|
513 |
+
"Python": 0.9830000400543213,
|
514 |
+
"HTML": 0.9880000352859497,
|
515 |
+
"Java": 0.9630000591278076,
|
516 |
+
"PHP": 0.9550000429153442
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.658,
|
520 |
+
"Python": 0.632,
|
521 |
+
"HTML": 0.789,
|
522 |
+
"Java": 0.63,
|
523 |
+
"PHP": 0.602
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.675,
|
527 |
+
"Python": 0.669,
|
528 |
+
"HTML": 0.829,
|
529 |
+
"Java": 0.656,
|
530 |
+
"PHP": 0.641
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.755,
|
534 |
+
"Python": 0.718,
|
535 |
+
"HTML": 0.893,
|
536 |
+
"Java": 0.735,
|
537 |
+
"PHP": 0.679
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.632,
|
541 |
+
"Python": 0.625,
|
542 |
+
"HTML": 0.688,
|
543 |
+
"Java": 0.62,
|
544 |
+
"PHP": 0.603
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.738,
|
548 |
+
"Python": 0.663,
|
549 |
+
"HTML": 0.897,
|
550 |
+
"Java": 0.655,
|
551 |
+
"PHP": 0.605
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.785,
|
555 |
+
"Python": 0.68,
|
556 |
+
"HTML": 0.944,
|
557 |
+
"Java": 0.699,
|
558 |
+
"PHP": 0.92
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9380000233650208,
|
564 |
+
"1": 0.9810000658035278,
|
565 |
+
"2": 0.9290000200271606,
|
566 |
+
"3": 0.9440000653266907
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9390000700950623,
|
570 |
+
"1": 0.9890000224113464,
|
571 |
+
"2": 0.9300000667572021,
|
572 |
+
"3": 0.9460000395774841
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.809,
|
576 |
+
"1": 0.654,
|
577 |
+
"2": 0.658,
|
578 |
+
"3": 0.735
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.809,
|
582 |
+
"1": 0.798,
|
583 |
+
"2": 0.696,
|
584 |
+
"3": 0.751
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.817,
|
588 |
+
"1": 0.876,
|
589 |
+
"2": 0.775,
|
590 |
+
"3": 0.844
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.859,
|
594 |
+
"1": 0.955,
|
595 |
+
"2": 0.604,
|
596 |
+
"3": 0.713
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.866,
|
600 |
+
"1": 0.937,
|
601 |
+
"2": 0.714,
|
602 |
+
"3": 0.728
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.878,
|
606 |
+
"1": 0.952,
|
607 |
+
"2": 0.828,
|
608 |
+
"3": 0.87
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.999000072479248,
|
614 |
+
"fr": 0.9970000386238098,
|
615 |
+
"de": 0.9980000257492065,
|
616 |
+
"es": 1.0,
|
617 |
+
"nl": 0.9970000386238098
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 0.999000072479248,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.722,
|
628 |
+
"fr": 0.592,
|
629 |
+
"de": 0.76,
|
630 |
+
"es": 0.481,
|
631 |
+
"nl": 0.655
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.845,
|
635 |
+
"fr": 0.594,
|
636 |
+
"de": 0.829,
|
637 |
+
"es": 0.967,
|
638 |
+
"nl": 0.734
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.891,
|
642 |
+
"fr": 0.918,
|
643 |
+
"de": 0.897,
|
644 |
+
"es": 0.977,
|
645 |
+
"nl": 0.861
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.999,
|
649 |
+
"fr": 0.993,
|
650 |
+
"de": 0.917,
|
651 |
+
"es": 0.918,
|
652 |
+
"nl": 0.748
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.997,
|
656 |
+
"fr": 0.997,
|
657 |
+
"de": 0.928,
|
658 |
+
"es": 0.992,
|
659 |
+
"nl": 0.749
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.997,
|
663 |
+
"fr": 0.997,
|
664 |
+
"de": 0.934,
|
665 |
+
"es": 0.994,
|
666 |
+
"nl": 0.997
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "097162b4-2102-4806-98e5-0369afa5a457",
|
30 |
+
"datetime_epoch_millis": 1737415694424,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9594125431030989,
|
34 |
+
"llm_top_1_test_accuracy": 0.6620500000000001,
|
35 |
+
"llm_top_2_test_accuracy": 0.7194124999999999,
|
36 |
+
"llm_top_5_test_accuracy": 0.78095,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9532000396400689,
|
44 |
+
"sae_top_1_test_accuracy": 0.7766625,
|
45 |
+
"sae_top_2_test_accuracy": 0.8073,
|
46 |
+
"sae_top_5_test_accuracy": 0.8809,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9600000500679016,
|
65 |
+
"sae_top_1_test_accuracy": 0.7722,
|
66 |
+
"sae_top_2_test_accuracy": 0.8141999999999999,
|
67 |
+
"sae_top_5_test_accuracy": 0.8936,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9550000548362731,
|
76 |
+
"llm_top_1_test_accuracy": 0.6718000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7086,
|
78 |
+
"llm_top_5_test_accuracy": 0.7617999999999999,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9466000318527221,
|
84 |
+
"sae_top_1_test_accuracy": 0.741,
|
85 |
+
"sae_top_2_test_accuracy": 0.767,
|
86 |
+
"sae_top_5_test_accuracy": 0.8666,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9320000410079956,
|
95 |
+
"llm_top_1_test_accuracy": 0.6904,
|
96 |
+
"llm_top_2_test_accuracy": 0.7382000000000001,
|
97 |
+
"llm_top_5_test_accuracy": 0.7714000000000001,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9210000395774841,
|
103 |
+
"sae_top_1_test_accuracy": 0.8225999999999999,
|
104 |
+
"sae_top_2_test_accuracy": 0.8408000000000001,
|
105 |
+
"sae_top_5_test_accuracy": 0.8640000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9176000475883483,
|
114 |
+
"llm_top_1_test_accuracy": 0.6004,
|
115 |
+
"llm_top_2_test_accuracy": 0.6458,
|
116 |
+
"llm_top_5_test_accuracy": 0.6648,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.913800036907196,
|
122 |
+
"sae_top_1_test_accuracy": 0.7272000000000001,
|
123 |
+
"sae_top_2_test_accuracy": 0.7906000000000001,
|
124 |
+
"sae_top_5_test_accuracy": 0.8364,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9815000593662262,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9670000374317169,
|
141 |
+
"sae_top_1_test_accuracy": 0.889,
|
142 |
+
"sae_top_2_test_accuracy": 0.895,
|
143 |
+
"sae_top_5_test_accuracy": 0.933,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9690000414848328,
|
152 |
+
"llm_top_1_test_accuracy": 0.6622,
|
153 |
+
"llm_top_2_test_accuracy": 0.6940000000000001,
|
154 |
+
"llm_top_5_test_accuracy": 0.7559999999999999,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9702000379562378,
|
160 |
+
"sae_top_1_test_accuracy": 0.6464000000000001,
|
161 |
+
"sae_top_2_test_accuracy": 0.7192000000000001,
|
162 |
+
"sae_top_5_test_accuracy": 0.8368,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9510000497102737,
|
171 |
+
"llm_top_1_test_accuracy": 0.714,
|
172 |
+
"llm_top_2_test_accuracy": 0.7635,
|
173 |
+
"llm_top_5_test_accuracy": 0.828,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9500000327825546,
|
179 |
+
"sae_top_1_test_accuracy": 0.6825,
|
180 |
+
"sae_top_2_test_accuracy": 0.7,
|
181 |
+
"sae_top_5_test_accuracy": 0.826,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9998000144958497,
|
190 |
+
"llm_top_1_test_accuracy": 0.642,
|
191 |
+
"llm_top_2_test_accuracy": 0.7938,
|
192 |
+
"llm_top_5_test_accuracy": 0.9088,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9970000505447387,
|
198 |
+
"sae_top_1_test_accuracy": 0.9324,
|
199 |
+
"sae_top_2_test_accuracy": 0.9316000000000001,
|
200 |
+
"sae_top_5_test_accuracy": 0.9907999999999999,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1",
|
210 |
+
"sae_lens_version": "5.3.2",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 65536,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9390000700950623,
|
240 |
+
"1": 0.9580000638961792,
|
241 |
+
"2": 0.9460000395774841,
|
242 |
+
"6": 0.9830000400543213,
|
243 |
+
"9": 0.9740000367164612
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.568,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.791,
|
257 |
+
"9": 0.551
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.585,
|
261 |
+
"1": 0.666,
|
262 |
+
"2": 0.673,
|
263 |
+
"6": 0.801,
|
264 |
+
"9": 0.712
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.707,
|
269 |
+
"2": 0.764,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.864
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.633,
|
275 |
+
"1": 0.616,
|
276 |
+
"2": 0.872,
|
277 |
+
"6": 0.828,
|
278 |
+
"9": 0.912
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.656,
|
282 |
+
"1": 0.655,
|
283 |
+
"2": 0.864,
|
284 |
+
"6": 0.98,
|
285 |
+
"9": 0.916
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.852,
|
289 |
+
"1": 0.835,
|
290 |
+
"2": 0.866,
|
291 |
+
"6": 0.983,
|
292 |
+
"9": 0.932
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9570000171661377,
|
298 |
+
"13": 0.9540000557899475,
|
299 |
+
"14": 0.9420000314712524,
|
300 |
+
"18": 0.9190000295639038,
|
301 |
+
"19": 0.9610000252723694
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9580000638961792,
|
305 |
+
"13": 0.9590000510215759,
|
306 |
+
"14": 0.9600000381469727,
|
307 |
+
"18": 0.9440000653266907,
|
308 |
+
"19": 0.9540000557899475
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.564,
|
312 |
+
"13": 0.669,
|
313 |
+
"14": 0.644,
|
314 |
+
"18": 0.701,
|
315 |
+
"19": 0.781
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.689,
|
319 |
+
"13": 0.709,
|
320 |
+
"14": 0.667,
|
321 |
+
"18": 0.71,
|
322 |
+
"19": 0.768
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.792,
|
326 |
+
"13": 0.74,
|
327 |
+
"14": 0.723,
|
328 |
+
"18": 0.727,
|
329 |
+
"19": 0.827
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.605,
|
333 |
+
"13": 0.689,
|
334 |
+
"14": 0.879,
|
335 |
+
"18": 0.686,
|
336 |
+
"19": 0.846
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.736,
|
340 |
+
"13": 0.677,
|
341 |
+
"14": 0.875,
|
342 |
+
"18": 0.712,
|
343 |
+
"19": 0.835
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.954,
|
347 |
+
"13": 0.74,
|
348 |
+
"14": 0.882,
|
349 |
+
"18": 0.906,
|
350 |
+
"19": 0.851
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9500000476837158,
|
356 |
+
"21": 0.9150000214576721,
|
357 |
+
"22": 0.909000039100647,
|
358 |
+
"25": 0.9550000429153442,
|
359 |
+
"26": 0.8760000467300415
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9620000720024109,
|
363 |
+
"21": 0.9190000295639038,
|
364 |
+
"22": 0.9150000214576721,
|
365 |
+
"25": 0.9600000381469727,
|
366 |
+
"26": 0.9040000438690186
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.715,
|
370 |
+
"21": 0.761,
|
371 |
+
"22": 0.638,
|
372 |
+
"25": 0.698,
|
373 |
+
"26": 0.64
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.81,
|
377 |
+
"21": 0.776,
|
378 |
+
"22": 0.679,
|
379 |
+
"25": 0.754,
|
380 |
+
"26": 0.672
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.878,
|
384 |
+
"21": 0.803,
|
385 |
+
"22": 0.686,
|
386 |
+
"25": 0.798,
|
387 |
+
"26": 0.692
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.904,
|
391 |
+
"21": 0.826,
|
392 |
+
"22": 0.892,
|
393 |
+
"25": 0.878,
|
394 |
+
"26": 0.613
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.908,
|
398 |
+
"21": 0.826,
|
399 |
+
"22": 0.882,
|
400 |
+
"25": 0.878,
|
401 |
+
"26": 0.71
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.937,
|
405 |
+
"21": 0.854,
|
406 |
+
"22": 0.89,
|
407 |
+
"25": 0.881,
|
408 |
+
"26": 0.758
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9330000281333923,
|
414 |
+
"2": 0.9330000281333923,
|
415 |
+
"3": 0.9120000600814819,
|
416 |
+
"5": 0.9100000262260437,
|
417 |
+
"6": 0.8810000419616699
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9470000267028809,
|
421 |
+
"2": 0.9300000667572021,
|
422 |
+
"3": 0.9250000715255737,
|
423 |
+
"5": 0.9200000166893005,
|
424 |
+
"6": 0.8660000562667847
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.658,
|
428 |
+
"2": 0.579,
|
429 |
+
"3": 0.609,
|
430 |
+
"5": 0.568,
|
431 |
+
"6": 0.588
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.754,
|
435 |
+
"2": 0.64,
|
436 |
+
"3": 0.605,
|
437 |
+
"5": 0.609,
|
438 |
+
"6": 0.621
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.765,
|
442 |
+
"2": 0.64,
|
443 |
+
"3": 0.614,
|
444 |
+
"5": 0.638,
|
445 |
+
"6": 0.667
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.863,
|
449 |
+
"2": 0.766,
|
450 |
+
"3": 0.598,
|
451 |
+
"5": 0.805,
|
452 |
+
"6": 0.604
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.875,
|
456 |
+
"2": 0.843,
|
457 |
+
"3": 0.677,
|
458 |
+
"5": 0.885,
|
459 |
+
"6": 0.673
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.911,
|
463 |
+
"2": 0.853,
|
464 |
+
"3": 0.755,
|
465 |
+
"5": 0.904,
|
466 |
+
"6": 0.759
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9690000414848328,
|
472 |
+
"5.0": 0.9650000333786011
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9820000529289246,
|
476 |
+
"5.0": 0.9810000658035278
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.672,
|
480 |
+
"5.0": 0.672
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.889,
|
492 |
+
"5.0": 0.889
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.895,
|
496 |
+
"5.0": 0.895
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.933,
|
500 |
+
"5.0": 0.933
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9570000171661377,
|
506 |
+
"Python": 0.987000048160553,
|
507 |
+
"HTML": 0.984000027179718,
|
508 |
+
"Java": 0.9640000462532043,
|
509 |
+
"PHP": 0.9590000510215759
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.956000030040741,
|
513 |
+
"Python": 0.9830000400543213,
|
514 |
+
"HTML": 0.9880000352859497,
|
515 |
+
"Java": 0.9630000591278076,
|
516 |
+
"PHP": 0.9550000429153442
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.658,
|
520 |
+
"Python": 0.632,
|
521 |
+
"HTML": 0.789,
|
522 |
+
"Java": 0.63,
|
523 |
+
"PHP": 0.602
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.675,
|
527 |
+
"Python": 0.669,
|
528 |
+
"HTML": 0.829,
|
529 |
+
"Java": 0.656,
|
530 |
+
"PHP": 0.641
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.755,
|
534 |
+
"Python": 0.718,
|
535 |
+
"HTML": 0.893,
|
536 |
+
"Java": 0.735,
|
537 |
+
"PHP": 0.679
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.632,
|
541 |
+
"Python": 0.623,
|
542 |
+
"HTML": 0.737,
|
543 |
+
"Java": 0.636,
|
544 |
+
"PHP": 0.604
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.622,
|
548 |
+
"Python": 0.655,
|
549 |
+
"HTML": 0.736,
|
550 |
+
"Java": 0.662,
|
551 |
+
"PHP": 0.921
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.715,
|
555 |
+
"Python": 0.783,
|
556 |
+
"HTML": 0.95,
|
557 |
+
"Java": 0.808,
|
558 |
+
"PHP": 0.928
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9420000314712524,
|
564 |
+
"1": 0.9830000400543213,
|
565 |
+
"2": 0.9280000329017639,
|
566 |
+
"3": 0.9470000267028809
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9390000700950623,
|
570 |
+
"1": 0.9890000224113464,
|
571 |
+
"2": 0.9300000667572021,
|
572 |
+
"3": 0.9460000395774841
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.809,
|
576 |
+
"1": 0.654,
|
577 |
+
"2": 0.658,
|
578 |
+
"3": 0.735
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.809,
|
582 |
+
"1": 0.798,
|
583 |
+
"2": 0.696,
|
584 |
+
"3": 0.751
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.817,
|
588 |
+
"1": 0.876,
|
589 |
+
"2": 0.775,
|
590 |
+
"3": 0.844
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.675,
|
594 |
+
"1": 0.687,
|
595 |
+
"2": 0.715,
|
596 |
+
"3": 0.653
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.713,
|
600 |
+
"1": 0.698,
|
601 |
+
"2": 0.734,
|
602 |
+
"3": 0.655
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.827,
|
606 |
+
"1": 0.91,
|
607 |
+
"2": 0.808,
|
608 |
+
"3": 0.759
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.9980000257492065,
|
614 |
+
"fr": 0.999000072479248,
|
615 |
+
"de": 0.9960000514984131,
|
616 |
+
"es": 0.9960000514984131,
|
617 |
+
"nl": 0.9960000514984131
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 0.999000072479248,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.722,
|
628 |
+
"fr": 0.592,
|
629 |
+
"de": 0.76,
|
630 |
+
"es": 0.481,
|
631 |
+
"nl": 0.655
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.845,
|
635 |
+
"fr": 0.594,
|
636 |
+
"de": 0.829,
|
637 |
+
"es": 0.967,
|
638 |
+
"nl": 0.734
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.891,
|
642 |
+
"fr": 0.918,
|
643 |
+
"de": 0.897,
|
644 |
+
"es": 0.977,
|
645 |
+
"nl": 0.861
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 1.0,
|
649 |
+
"fr": 0.996,
|
650 |
+
"de": 0.935,
|
651 |
+
"es": 0.993,
|
652 |
+
"nl": 0.738
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.998,
|
656 |
+
"fr": 0.994,
|
657 |
+
"de": 0.934,
|
658 |
+
"es": 0.993,
|
659 |
+
"nl": 0.739
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.999,
|
663 |
+
"fr": 0.996,
|
664 |
+
"de": 0.964,
|
665 |
+
"es": 0.998,
|
666 |
+
"nl": 0.997
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "819923c5-1c76-4985-9889-e5dfdb54eb01",
|
30 |
+
"datetime_epoch_millis": 1737175633073,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9596437945961953,
|
34 |
+
"llm_top_1_test_accuracy": 0.65660625,
|
35 |
+
"llm_top_2_test_accuracy": 0.7208812499999999,
|
36 |
+
"llm_top_5_test_accuracy": 0.7805375,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9571500454097986,
|
44 |
+
"sae_top_1_test_accuracy": 0.7862,
|
45 |
+
"sae_top_2_test_accuracy": 0.8347125000000001,
|
46 |
+
"sae_top_5_test_accuracy": 0.8826999999999999,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9624000430107117,
|
65 |
+
"sae_top_1_test_accuracy": 0.8603999999999999,
|
66 |
+
"sae_top_2_test_accuracy": 0.8692,
|
67 |
+
"sae_top_5_test_accuracy": 0.905,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9540000557899475,
|
76 |
+
"llm_top_1_test_accuracy": 0.6744,
|
77 |
+
"llm_top_2_test_accuracy": 0.7160000000000001,
|
78 |
+
"llm_top_5_test_accuracy": 0.7646,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9506000518798828,
|
84 |
+
"sae_top_1_test_accuracy": 0.7444,
|
85 |
+
"sae_top_2_test_accuracy": 0.755,
|
86 |
+
"sae_top_5_test_accuracy": 0.8315999999999999,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9344000458717346,
|
95 |
+
"llm_top_1_test_accuracy": 0.6889999999999998,
|
96 |
+
"llm_top_2_test_accuracy": 0.7392,
|
97 |
+
"llm_top_5_test_accuracy": 0.7612000000000001,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.93100004196167,
|
103 |
+
"sae_top_1_test_accuracy": 0.7882,
|
104 |
+
"sae_top_2_test_accuracy": 0.8248,
|
105 |
+
"sae_top_5_test_accuracy": 0.8506,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9188000321388244,
|
114 |
+
"llm_top_1_test_accuracy": 0.5984,
|
115 |
+
"llm_top_2_test_accuracy": 0.6512,
|
116 |
+
"llm_top_5_test_accuracy": 0.671,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9210000514984131,
|
122 |
+
"sae_top_1_test_accuracy": 0.7694,
|
123 |
+
"sae_top_2_test_accuracy": 0.7794000000000001,
|
124 |
+
"sae_top_5_test_accuracy": 0.8231999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9820000529289246,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9750000238418579,
|
141 |
+
"sae_top_1_test_accuracy": 0.647,
|
142 |
+
"sae_top_2_test_accuracy": 0.927,
|
143 |
+
"sae_top_5_test_accuracy": 0.926,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9682000517845154,
|
152 |
+
"llm_top_1_test_accuracy": 0.6596,
|
153 |
+
"llm_top_2_test_accuracy": 0.6933999999999999,
|
154 |
+
"llm_top_5_test_accuracy": 0.7554000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9672000527381897,
|
160 |
+
"sae_top_1_test_accuracy": 0.7484000000000001,
|
161 |
+
"sae_top_2_test_accuracy": 0.7482,
|
162 |
+
"sae_top_5_test_accuracy": 0.8534,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9507500529289246,
|
171 |
+
"llm_top_1_test_accuracy": 0.66625,
|
172 |
+
"llm_top_2_test_accuracy": 0.77525,
|
173 |
+
"llm_top_5_test_accuracy": 0.8255,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9510000497102737,
|
179 |
+
"sae_top_1_test_accuracy": 0.848,
|
180 |
+
"sae_top_2_test_accuracy": 0.8614999999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.885,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.6496000000000001,
|
191 |
+
"llm_top_2_test_accuracy": 0.7806,
|
192 |
+
"llm_top_5_test_accuracy": 0.9097999999999999,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9990000486373901,
|
198 |
+
"sae_top_1_test_accuracy": 0.8837999999999999,
|
199 |
+
"sae_top_2_test_accuracy": 0.9126,
|
200 |
+
"sae_top_5_test_accuracy": 0.9868,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3",
|
210 |
+
"sae_lens_version": "5.3.0",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 65536,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9440000653266907,
|
240 |
+
"1": 0.9550000429153442,
|
241 |
+
"2": 0.9500000476837158,
|
242 |
+
"6": 0.9890000224113464,
|
243 |
+
"9": 0.9740000367164612
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.568,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.791,
|
257 |
+
"9": 0.551
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.585,
|
261 |
+
"1": 0.666,
|
262 |
+
"2": 0.673,
|
263 |
+
"6": 0.801,
|
264 |
+
"9": 0.712
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.707,
|
269 |
+
"2": 0.764,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.864
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.84,
|
275 |
+
"1": 0.65,
|
276 |
+
"2": 0.884,
|
277 |
+
"6": 0.98,
|
278 |
+
"9": 0.948
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.847,
|
282 |
+
"1": 0.69,
|
283 |
+
"2": 0.884,
|
284 |
+
"6": 0.98,
|
285 |
+
"9": 0.945
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.846,
|
289 |
+
"1": 0.853,
|
290 |
+
"2": 0.905,
|
291 |
+
"6": 0.98,
|
292 |
+
"9": 0.941
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9630000591278076,
|
298 |
+
"13": 0.9500000476837158,
|
299 |
+
"14": 0.9540000557899475,
|
300 |
+
"18": 0.9260000586509705,
|
301 |
+
"19": 0.9600000381469727
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9640000462532043,
|
305 |
+
"13": 0.9580000638961792,
|
306 |
+
"14": 0.9540000557899475,
|
307 |
+
"18": 0.9390000700950623,
|
308 |
+
"19": 0.9550000429153442
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.563,
|
312 |
+
"13": 0.664,
|
313 |
+
"14": 0.639,
|
314 |
+
"18": 0.712,
|
315 |
+
"19": 0.794
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.687,
|
319 |
+
"13": 0.72,
|
320 |
+
"14": 0.672,
|
321 |
+
"18": 0.728,
|
322 |
+
"19": 0.773
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.791,
|
326 |
+
"13": 0.738,
|
327 |
+
"14": 0.735,
|
328 |
+
"18": 0.74,
|
329 |
+
"19": 0.819
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.857,
|
333 |
+
"13": 0.684,
|
334 |
+
"14": 0.664,
|
335 |
+
"18": 0.675,
|
336 |
+
"19": 0.842
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.857,
|
340 |
+
"13": 0.668,
|
341 |
+
"14": 0.681,
|
342 |
+
"18": 0.73,
|
343 |
+
"19": 0.839
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.864,
|
347 |
+
"13": 0.753,
|
348 |
+
"14": 0.877,
|
349 |
+
"18": 0.808,
|
350 |
+
"19": 0.856
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9600000381469727,
|
356 |
+
"21": 0.9320000410079956,
|
357 |
+
"22": 0.9120000600814819,
|
358 |
+
"25": 0.9550000429153442,
|
359 |
+
"26": 0.8960000276565552
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9600000381469727,
|
363 |
+
"21": 0.9230000376701355,
|
364 |
+
"22": 0.9220000505447388,
|
365 |
+
"25": 0.9670000672340393,
|
366 |
+
"26": 0.9000000357627869
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.707,
|
370 |
+
"21": 0.766,
|
371 |
+
"22": 0.635,
|
372 |
+
"25": 0.706,
|
373 |
+
"26": 0.631
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.811,
|
377 |
+
"21": 0.749,
|
378 |
+
"22": 0.682,
|
379 |
+
"25": 0.763,
|
380 |
+
"26": 0.691
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.872,
|
384 |
+
"21": 0.779,
|
385 |
+
"22": 0.682,
|
386 |
+
"25": 0.796,
|
387 |
+
"26": 0.677
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.859,
|
391 |
+
"21": 0.736,
|
392 |
+
"22": 0.829,
|
393 |
+
"25": 0.873,
|
394 |
+
"26": 0.644
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.89,
|
398 |
+
"21": 0.824,
|
399 |
+
"22": 0.833,
|
400 |
+
"25": 0.881,
|
401 |
+
"26": 0.696
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.911,
|
405 |
+
"21": 0.855,
|
406 |
+
"22": 0.853,
|
407 |
+
"25": 0.867,
|
408 |
+
"26": 0.767
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9440000653266907,
|
414 |
+
"2": 0.9320000410079956,
|
415 |
+
"3": 0.921000063419342,
|
416 |
+
"5": 0.9250000715255737,
|
417 |
+
"6": 0.8830000162124634
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9480000734329224,
|
421 |
+
"2": 0.9290000200271606,
|
422 |
+
"3": 0.9200000166893005,
|
423 |
+
"5": 0.9200000166893005,
|
424 |
+
"6": 0.8770000338554382
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.664,
|
428 |
+
"2": 0.587,
|
429 |
+
"3": 0.585,
|
430 |
+
"5": 0.559,
|
431 |
+
"6": 0.597
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.757,
|
435 |
+
"2": 0.655,
|
436 |
+
"3": 0.603,
|
437 |
+
"5": 0.621,
|
438 |
+
"6": 0.62
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.775,
|
442 |
+
"2": 0.64,
|
443 |
+
"3": 0.624,
|
444 |
+
"5": 0.632,
|
445 |
+
"6": 0.684
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.838,
|
449 |
+
"2": 0.74,
|
450 |
+
"3": 0.67,
|
451 |
+
"5": 0.854,
|
452 |
+
"6": 0.745
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.859,
|
456 |
+
"2": 0.76,
|
457 |
+
"3": 0.674,
|
458 |
+
"5": 0.872,
|
459 |
+
"6": 0.732
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.912,
|
463 |
+
"2": 0.894,
|
464 |
+
"3": 0.68,
|
465 |
+
"5": 0.872,
|
466 |
+
"6": 0.758
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9750000238418579,
|
472 |
+
"5.0": 0.9750000238418579
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9810000658035278,
|
476 |
+
"5.0": 0.9830000400543213
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.672,
|
480 |
+
"5.0": 0.672
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.647,
|
492 |
+
"5.0": 0.647
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.927,
|
496 |
+
"5.0": 0.927
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.926,
|
500 |
+
"5.0": 0.926
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.956000030040741,
|
506 |
+
"Python": 0.9830000400543213,
|
507 |
+
"HTML": 0.9850000739097595,
|
508 |
+
"Java": 0.968000054359436,
|
509 |
+
"PHP": 0.9440000653266907
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.956000030040741,
|
513 |
+
"Python": 0.9900000691413879,
|
514 |
+
"HTML": 0.9900000691413879,
|
515 |
+
"Java": 0.956000030040741,
|
516 |
+
"PHP": 0.9490000605583191
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.664,
|
520 |
+
"Python": 0.631,
|
521 |
+
"HTML": 0.778,
|
522 |
+
"Java": 0.628,
|
523 |
+
"PHP": 0.597
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.667,
|
527 |
+
"Python": 0.671,
|
528 |
+
"HTML": 0.792,
|
529 |
+
"Java": 0.682,
|
530 |
+
"PHP": 0.655
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.753,
|
534 |
+
"Python": 0.714,
|
535 |
+
"HTML": 0.905,
|
536 |
+
"Java": 0.726,
|
537 |
+
"PHP": 0.679
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.638,
|
541 |
+
"Python": 0.631,
|
542 |
+
"HTML": 0.886,
|
543 |
+
"Java": 0.673,
|
544 |
+
"PHP": 0.914
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.64,
|
548 |
+
"Python": 0.63,
|
549 |
+
"HTML": 0.88,
|
550 |
+
"Java": 0.687,
|
551 |
+
"PHP": 0.904
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.862,
|
555 |
+
"Python": 0.936,
|
556 |
+
"HTML": 0.891,
|
557 |
+
"Java": 0.668,
|
558 |
+
"PHP": 0.91
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.940000057220459,
|
564 |
+
"1": 0.9860000610351562,
|
565 |
+
"2": 0.9320000410079956,
|
566 |
+
"3": 0.9460000395774841
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9380000233650208,
|
570 |
+
"1": 0.9860000610351562,
|
571 |
+
"2": 0.9260000586509705,
|
572 |
+
"3": 0.9530000686645508
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.555,
|
576 |
+
"1": 0.67,
|
577 |
+
"2": 0.669,
|
578 |
+
"3": 0.771
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.807,
|
582 |
+
"1": 0.801,
|
583 |
+
"2": 0.687,
|
584 |
+
"3": 0.806
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.815,
|
588 |
+
"1": 0.886,
|
589 |
+
"2": 0.757,
|
590 |
+
"3": 0.844
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.838,
|
594 |
+
"1": 0.951,
|
595 |
+
"2": 0.878,
|
596 |
+
"3": 0.725
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.867,
|
600 |
+
"1": 0.958,
|
601 |
+
"2": 0.885,
|
602 |
+
"3": 0.736
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.855,
|
606 |
+
"1": 0.967,
|
607 |
+
"2": 0.881,
|
608 |
+
"3": 0.837
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.9980000257492065,
|
614 |
+
"fr": 0.999000072479248,
|
615 |
+
"de": 0.999000072479248,
|
616 |
+
"es": 1.0,
|
617 |
+
"nl": 0.999000072479248
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 0.999000072479248,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 0.999000072479248
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.745,
|
628 |
+
"fr": 0.6,
|
629 |
+
"de": 0.756,
|
630 |
+
"es": 0.492,
|
631 |
+
"nl": 0.655
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.83,
|
635 |
+
"fr": 0.589,
|
636 |
+
"de": 0.825,
|
637 |
+
"es": 0.905,
|
638 |
+
"nl": 0.754
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.89,
|
642 |
+
"fr": 0.919,
|
643 |
+
"de": 0.892,
|
644 |
+
"es": 0.985,
|
645 |
+
"nl": 0.863
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.999,
|
649 |
+
"fr": 0.994,
|
650 |
+
"de": 0.881,
|
651 |
+
"es": 0.925,
|
652 |
+
"nl": 0.62
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.999,
|
656 |
+
"fr": 0.992,
|
657 |
+
"de": 0.889,
|
658 |
+
"es": 0.947,
|
659 |
+
"nl": 0.736
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.999,
|
663 |
+
"fr": 0.998,
|
664 |
+
"de": 0.944,
|
665 |
+
"es": 0.995,
|
666 |
+
"nl": 0.998
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "2fb52972-b8f1-4935-b2c9-85ce89db9c2a",
|
30 |
+
"datetime_epoch_millis": 1737398103036,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9589062932878732,
|
34 |
+
"llm_top_1_test_accuracy": 0.65764375,
|
35 |
+
"llm_top_2_test_accuracy": 0.7212187500000001,
|
36 |
+
"llm_top_5_test_accuracy": 0.78248125,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9569812972098589,
|
44 |
+
"sae_top_1_test_accuracy": 0.7564562499999999,
|
45 |
+
"sae_top_2_test_accuracy": 0.7798562499999999,
|
46 |
+
"sae_top_5_test_accuracy": 0.85685625,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9658000588417053,
|
65 |
+
"sae_top_1_test_accuracy": 0.7704,
|
66 |
+
"sae_top_2_test_accuracy": 0.8124,
|
67 |
+
"sae_top_5_test_accuracy": 0.8994,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9532000422477722,
|
76 |
+
"llm_top_1_test_accuracy": 0.6776,
|
77 |
+
"llm_top_2_test_accuracy": 0.732,
|
78 |
+
"llm_top_5_test_accuracy": 0.7626,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9494000434875488,
|
84 |
+
"sae_top_1_test_accuracy": 0.7442,
|
85 |
+
"sae_top_2_test_accuracy": 0.7772,
|
86 |
+
"sae_top_5_test_accuracy": 0.82,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9290000557899475,
|
95 |
+
"llm_top_1_test_accuracy": 0.6906,
|
96 |
+
"llm_top_2_test_accuracy": 0.7330000000000001,
|
97 |
+
"llm_top_5_test_accuracy": 0.7692000000000001,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9298000454902648,
|
103 |
+
"sae_top_1_test_accuracy": 0.7260000000000001,
|
104 |
+
"sae_top_2_test_accuracy": 0.7442,
|
105 |
+
"sae_top_5_test_accuracy": 0.8364,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9174000382423401,
|
114 |
+
"llm_top_1_test_accuracy": 0.5976,
|
115 |
+
"llm_top_2_test_accuracy": 0.6486000000000001,
|
116 |
+
"llm_top_5_test_accuracy": 0.6646,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9156000494956971,
|
122 |
+
"sae_top_1_test_accuracy": 0.7718,
|
123 |
+
"sae_top_2_test_accuracy": 0.782,
|
124 |
+
"sae_top_5_test_accuracy": 0.8065999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.979500025510788,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9740000367164612,
|
141 |
+
"sae_top_1_test_accuracy": 0.675,
|
142 |
+
"sae_top_2_test_accuracy": 0.682,
|
143 |
+
"sae_top_5_test_accuracy": 0.895,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9728000521659851,
|
152 |
+
"llm_top_1_test_accuracy": 0.6642,
|
153 |
+
"llm_top_2_test_accuracy": 0.6965999999999999,
|
154 |
+
"llm_top_5_test_accuracy": 0.7678,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9676000475883484,
|
160 |
+
"sae_top_1_test_accuracy": 0.6228,
|
161 |
+
"sae_top_2_test_accuracy": 0.6794,
|
162 |
+
"sae_top_5_test_accuracy": 0.7368,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9507500380277634,
|
171 |
+
"llm_top_1_test_accuracy": 0.67475,
|
172 |
+
"llm_top_2_test_accuracy": 0.76475,
|
173 |
+
"llm_top_5_test_accuracy": 0.82625,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9542500525712967,
|
179 |
+
"sae_top_1_test_accuracy": 0.84725,
|
180 |
+
"sae_top_2_test_accuracy": 0.85225,
|
181 |
+
"sae_top_5_test_accuracy": 0.87225,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9992000579833984,
|
190 |
+
"llm_top_1_test_accuracy": 0.6407999999999999,
|
191 |
+
"llm_top_2_test_accuracy": 0.7834,
|
192 |
+
"llm_top_5_test_accuracy": 0.9126000000000001,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9994000434875489,
|
198 |
+
"sae_top_1_test_accuracy": 0.8942,
|
199 |
+
"sae_top_2_test_accuracy": 0.9094,
|
200 |
+
"sae_top_5_test_accuracy": 0.9884000000000001,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3",
|
210 |
+
"sae_lens_version": "5.3.2",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 65536,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9480000734329224,
|
240 |
+
"1": 0.9690000414848328,
|
241 |
+
"2": 0.9490000605583191,
|
242 |
+
"6": 0.987000048160553,
|
243 |
+
"9": 0.9760000705718994
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.568,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.791,
|
257 |
+
"9": 0.551
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.585,
|
261 |
+
"1": 0.666,
|
262 |
+
"2": 0.673,
|
263 |
+
"6": 0.801,
|
264 |
+
"9": 0.712
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.707,
|
269 |
+
"2": 0.764,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.864
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.645,
|
275 |
+
"1": 0.591,
|
276 |
+
"2": 0.854,
|
277 |
+
"6": 0.832,
|
278 |
+
"9": 0.93
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.653,
|
282 |
+
"1": 0.639,
|
283 |
+
"2": 0.855,
|
284 |
+
"6": 0.981,
|
285 |
+
"9": 0.934
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.874,
|
289 |
+
"1": 0.833,
|
290 |
+
"2": 0.861,
|
291 |
+
"6": 0.981,
|
292 |
+
"9": 0.948
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9550000429153442,
|
298 |
+
"13": 0.9520000219345093,
|
299 |
+
"14": 0.9480000734329224,
|
300 |
+
"18": 0.9320000410079956,
|
301 |
+
"19": 0.9600000381469727
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9620000720024109,
|
305 |
+
"13": 0.9500000476837158,
|
306 |
+
"14": 0.9600000381469727,
|
307 |
+
"18": 0.9330000281333923,
|
308 |
+
"19": 0.9610000252723694
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.578,
|
312 |
+
"13": 0.673,
|
313 |
+
"14": 0.642,
|
314 |
+
"18": 0.695,
|
315 |
+
"19": 0.8
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.782,
|
319 |
+
"13": 0.718,
|
320 |
+
"14": 0.687,
|
321 |
+
"18": 0.721,
|
322 |
+
"19": 0.752
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.786,
|
326 |
+
"13": 0.742,
|
327 |
+
"14": 0.724,
|
328 |
+
"18": 0.727,
|
329 |
+
"19": 0.834
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.736,
|
333 |
+
"13": 0.655,
|
334 |
+
"14": 0.822,
|
335 |
+
"18": 0.677,
|
336 |
+
"19": 0.831
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.847,
|
340 |
+
"13": 0.681,
|
341 |
+
"14": 0.824,
|
342 |
+
"18": 0.696,
|
343 |
+
"19": 0.838
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.862,
|
347 |
+
"13": 0.772,
|
348 |
+
"14": 0.848,
|
349 |
+
"18": 0.742,
|
350 |
+
"19": 0.876
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9570000171661377,
|
356 |
+
"21": 0.9300000667572021,
|
357 |
+
"22": 0.909000039100647,
|
358 |
+
"25": 0.9600000381469727,
|
359 |
+
"26": 0.893000066280365
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9630000591278076,
|
363 |
+
"21": 0.9150000214576721,
|
364 |
+
"22": 0.9110000729560852,
|
365 |
+
"25": 0.9630000591278076,
|
366 |
+
"26": 0.893000066280365
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.707,
|
370 |
+
"21": 0.776,
|
371 |
+
"22": 0.65,
|
372 |
+
"25": 0.696,
|
373 |
+
"26": 0.624
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.798,
|
377 |
+
"21": 0.764,
|
378 |
+
"22": 0.649,
|
379 |
+
"25": 0.77,
|
380 |
+
"26": 0.684
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.875,
|
384 |
+
"21": 0.793,
|
385 |
+
"22": 0.713,
|
386 |
+
"25": 0.796,
|
387 |
+
"26": 0.669
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.868,
|
391 |
+
"21": 0.709,
|
392 |
+
"22": 0.607,
|
393 |
+
"25": 0.878,
|
394 |
+
"26": 0.568
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.877,
|
398 |
+
"21": 0.688,
|
399 |
+
"22": 0.642,
|
400 |
+
"25": 0.882,
|
401 |
+
"26": 0.632
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.907,
|
405 |
+
"21": 0.849,
|
406 |
+
"22": 0.816,
|
407 |
+
"25": 0.881,
|
408 |
+
"26": 0.729
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9390000700950623,
|
414 |
+
"2": 0.937000036239624,
|
415 |
+
"3": 0.9080000519752502,
|
416 |
+
"5": 0.9230000376701355,
|
417 |
+
"6": 0.8710000514984131
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9510000348091125,
|
421 |
+
"2": 0.9360000491142273,
|
422 |
+
"3": 0.9150000214576721,
|
423 |
+
"5": 0.9160000681877136,
|
424 |
+
"6": 0.8690000176429749
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.655,
|
428 |
+
"2": 0.589,
|
429 |
+
"3": 0.591,
|
430 |
+
"5": 0.576,
|
431 |
+
"6": 0.577
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.752,
|
435 |
+
"2": 0.636,
|
436 |
+
"3": 0.616,
|
437 |
+
"5": 0.611,
|
438 |
+
"6": 0.628
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.738,
|
442 |
+
"2": 0.665,
|
443 |
+
"3": 0.61,
|
444 |
+
"5": 0.642,
|
445 |
+
"6": 0.668
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.842,
|
449 |
+
"2": 0.757,
|
450 |
+
"3": 0.699,
|
451 |
+
"5": 0.806,
|
452 |
+
"6": 0.755
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.881,
|
456 |
+
"2": 0.778,
|
457 |
+
"3": 0.683,
|
458 |
+
"5": 0.807,
|
459 |
+
"6": 0.761
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.903,
|
463 |
+
"2": 0.835,
|
464 |
+
"3": 0.716,
|
465 |
+
"5": 0.835,
|
466 |
+
"6": 0.744
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9750000238418579,
|
472 |
+
"5.0": 0.9730000495910645
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9800000190734863,
|
476 |
+
"5.0": 0.9790000319480896
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.672,
|
480 |
+
"5.0": 0.672
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.675,
|
492 |
+
"5.0": 0.675
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.682,
|
496 |
+
"5.0": 0.682
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.895,
|
500 |
+
"5.0": 0.895
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9520000219345093,
|
506 |
+
"Python": 0.9830000400543213,
|
507 |
+
"HTML": 0.9860000610351562,
|
508 |
+
"Java": 0.9620000720024109,
|
509 |
+
"PHP": 0.9550000429153442
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9580000638961792,
|
513 |
+
"Python": 0.9880000352859497,
|
514 |
+
"HTML": 0.9900000691413879,
|
515 |
+
"Java": 0.968000054359436,
|
516 |
+
"PHP": 0.9600000381469727
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.656,
|
520 |
+
"Python": 0.653,
|
521 |
+
"HTML": 0.794,
|
522 |
+
"Java": 0.628,
|
523 |
+
"PHP": 0.59
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.666,
|
527 |
+
"Python": 0.671,
|
528 |
+
"HTML": 0.795,
|
529 |
+
"Java": 0.692,
|
530 |
+
"PHP": 0.659
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.765,
|
534 |
+
"Python": 0.73,
|
535 |
+
"HTML": 0.898,
|
536 |
+
"Java": 0.742,
|
537 |
+
"PHP": 0.704
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.617,
|
541 |
+
"Python": 0.661,
|
542 |
+
"HTML": 0.651,
|
543 |
+
"Java": 0.593,
|
544 |
+
"PHP": 0.592
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.65,
|
548 |
+
"Python": 0.662,
|
549 |
+
"HTML": 0.799,
|
550 |
+
"Java": 0.655,
|
551 |
+
"PHP": 0.631
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.612,
|
555 |
+
"Python": 0.935,
|
556 |
+
"HTML": 0.827,
|
557 |
+
"Java": 0.675,
|
558 |
+
"PHP": 0.635
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9350000619888306,
|
564 |
+
"1": 0.9830000400543213,
|
565 |
+
"2": 0.9410000443458557,
|
566 |
+
"3": 0.9580000638961792
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9340000152587891,
|
570 |
+
"1": 0.9920000433921814,
|
571 |
+
"2": 0.9260000586509705,
|
572 |
+
"3": 0.9510000348091125
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.57,
|
576 |
+
"1": 0.673,
|
577 |
+
"2": 0.666,
|
578 |
+
"3": 0.79
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.808,
|
582 |
+
"1": 0.801,
|
583 |
+
"2": 0.705,
|
584 |
+
"3": 0.745
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.813,
|
588 |
+
"1": 0.888,
|
589 |
+
"2": 0.753,
|
590 |
+
"3": 0.851
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.861,
|
594 |
+
"1": 0.983,
|
595 |
+
"2": 0.82,
|
596 |
+
"3": 0.725
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.861,
|
600 |
+
"1": 0.981,
|
601 |
+
"2": 0.82,
|
602 |
+
"3": 0.747
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.872,
|
606 |
+
"1": 0.978,
|
607 |
+
"2": 0.835,
|
608 |
+
"3": 0.804
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.999000072479248,
|
614 |
+
"fr": 0.999000072479248,
|
615 |
+
"de": 1.0,
|
616 |
+
"es": 0.999000072479248,
|
617 |
+
"nl": 1.0
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 0.999000072479248,
|
621 |
+
"fr": 0.999000072479248,
|
622 |
+
"de": 0.999000072479248,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 0.999000072479248
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.742,
|
628 |
+
"fr": 0.584,
|
629 |
+
"de": 0.742,
|
630 |
+
"es": 0.49,
|
631 |
+
"nl": 0.646
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.834,
|
635 |
+
"fr": 0.582,
|
636 |
+
"de": 0.824,
|
637 |
+
"es": 0.915,
|
638 |
+
"nl": 0.762
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.89,
|
642 |
+
"fr": 0.926,
|
643 |
+
"de": 0.916,
|
644 |
+
"es": 0.98,
|
645 |
+
"nl": 0.851
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 1.0,
|
649 |
+
"fr": 0.993,
|
650 |
+
"de": 0.903,
|
651 |
+
"es": 0.901,
|
652 |
+
"nl": 0.674
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.998,
|
656 |
+
"fr": 0.996,
|
657 |
+
"de": 0.918,
|
658 |
+
"es": 0.915,
|
659 |
+
"nl": 0.72
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.998,
|
663 |
+
"fr": 0.996,
|
664 |
+
"de": 0.961,
|
665 |
+
"es": 0.99,
|
666 |
+
"nl": 0.997
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_notemp_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "dd1f056c-c374-4357-85c3-35eef0e30833",
|
30 |
+
"datetime_epoch_millis": 1736915581349,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9590812921524048,
|
34 |
+
"llm_top_1_test_accuracy": 0.6620187500000001,
|
35 |
+
"llm_top_2_test_accuracy": 0.7230875,
|
36 |
+
"llm_top_5_test_accuracy": 0.7807875,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9492312934249639,
|
44 |
+
"sae_top_1_test_accuracy": 0.72013125,
|
45 |
+
"sae_top_2_test_accuracy": 0.77938125,
|
46 |
+
"sae_top_5_test_accuracy": 0.83975625,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9578000426292419,
|
65 |
+
"sae_top_1_test_accuracy": 0.7712,
|
66 |
+
"sae_top_2_test_accuracy": 0.8108000000000001,
|
67 |
+
"sae_top_5_test_accuracy": 0.8855999999999999,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9544000506401062,
|
76 |
+
"llm_top_1_test_accuracy": 0.6696000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7368,
|
78 |
+
"llm_top_5_test_accuracy": 0.7632,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9396000504493713,
|
84 |
+
"sae_top_1_test_accuracy": 0.7272000000000001,
|
85 |
+
"sae_top_2_test_accuracy": 0.7844,
|
86 |
+
"sae_top_5_test_accuracy": 0.8135999999999999,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9318000555038453,
|
95 |
+
"llm_top_1_test_accuracy": 0.6884,
|
96 |
+
"llm_top_2_test_accuracy": 0.737,
|
97 |
+
"llm_top_5_test_accuracy": 0.7492,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9218000531196594,
|
103 |
+
"sae_top_1_test_accuracy": 0.7380000000000001,
|
104 |
+
"sae_top_2_test_accuracy": 0.7646,
|
105 |
+
"sae_top_5_test_accuracy": 0.827,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9180000424385071,
|
114 |
+
"llm_top_1_test_accuracy": 0.6088,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466,
|
116 |
+
"llm_top_5_test_accuracy": 0.6822,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.906600022315979,
|
122 |
+
"sae_top_1_test_accuracy": 0.5969999999999999,
|
123 |
+
"sae_top_2_test_accuracy": 0.6942,
|
124 |
+
"sae_top_5_test_accuracy": 0.7504,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9580000340938568,
|
141 |
+
"sae_top_1_test_accuracy": 0.67,
|
142 |
+
"sae_top_2_test_accuracy": 0.786,
|
143 |
+
"sae_top_5_test_accuracy": 0.837,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9672000408172607,
|
152 |
+
"llm_top_1_test_accuracy": 0.6668000000000001,
|
153 |
+
"llm_top_2_test_accuracy": 0.6896,
|
154 |
+
"llm_top_5_test_accuracy": 0.7656000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9636000633239746,
|
160 |
+
"sae_top_1_test_accuracy": 0.6305999999999999,
|
161 |
+
"sae_top_2_test_accuracy": 0.641,
|
162 |
+
"sae_top_5_test_accuracy": 0.753,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9512500464916229,
|
171 |
+
"llm_top_1_test_accuracy": 0.6957500000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.7795000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8245,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.950250044465065,
|
179 |
+
"sae_top_1_test_accuracy": 0.74125,
|
180 |
+
"sae_top_2_test_accuracy": 0.81925,
|
181 |
+
"sae_top_5_test_accuracy": 0.8632499999999999,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.6512,
|
191 |
+
"llm_top_2_test_accuracy": 0.7838,
|
192 |
+
"llm_top_5_test_accuracy": 0.9048,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9962000370025634,
|
198 |
+
"sae_top_1_test_accuracy": 0.8858,
|
199 |
+
"sae_top_2_test_accuracy": 0.9347999999999999,
|
200 |
+
"sae_top_5_test_accuracy": 0.9882,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatroyshkaBatchTopKTrainer_notemp_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": null
|
237 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_stop_grads_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "58421c5f-55a9-4806-82fa-ec39b27907c1",
|
30 |
+
"datetime_epoch_millis": 1736904776232,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9592062931507825,
|
34 |
+
"llm_top_1_test_accuracy": 0.66660625,
|
35 |
+
"llm_top_2_test_accuracy": 0.7192125,
|
36 |
+
"llm_top_5_test_accuracy": 0.7826437500000001,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9566625460982323,
|
44 |
+
"sae_top_1_test_accuracy": 0.7428187500000001,
|
45 |
+
"sae_top_2_test_accuracy": 0.7945687499999999,
|
46 |
+
"sae_top_5_test_accuracy": 0.8653937500000001,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9648000597953796,
|
65 |
+
"sae_top_1_test_accuracy": 0.7642,
|
66 |
+
"sae_top_2_test_accuracy": 0.8058,
|
67 |
+
"sae_top_5_test_accuracy": 0.8656,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9546000480651855,
|
76 |
+
"llm_top_1_test_accuracy": 0.6728,
|
77 |
+
"llm_top_2_test_accuracy": 0.73,
|
78 |
+
"llm_top_5_test_accuracy": 0.763,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9490000367164612,
|
84 |
+
"sae_top_1_test_accuracy": 0.7243999999999999,
|
85 |
+
"sae_top_2_test_accuracy": 0.7501999999999999,
|
86 |
+
"sae_top_5_test_accuracy": 0.835,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9290000438690186,
|
95 |
+
"llm_top_1_test_accuracy": 0.6904,
|
96 |
+
"llm_top_2_test_accuracy": 0.7402,
|
97 |
+
"llm_top_5_test_accuracy": 0.767,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9300000548362732,
|
103 |
+
"sae_top_1_test_accuracy": 0.6946000000000001,
|
104 |
+
"sae_top_2_test_accuracy": 0.7687999999999999,
|
105 |
+
"sae_top_5_test_accuracy": 0.8056000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9178000450134277,
|
114 |
+
"llm_top_1_test_accuracy": 0.6072,
|
115 |
+
"llm_top_2_test_accuracy": 0.6474,
|
116 |
+
"llm_top_5_test_accuracy": 0.6788000000000001,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9166000485420227,
|
122 |
+
"sae_top_1_test_accuracy": 0.6706,
|
123 |
+
"sae_top_2_test_accuracy": 0.7518,
|
124 |
+
"sae_top_5_test_accuracy": 0.8356,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9820000529289246,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9745000302791595,
|
141 |
+
"sae_top_1_test_accuracy": 0.838,
|
142 |
+
"sae_top_2_test_accuracy": 0.864,
|
143 |
+
"sae_top_5_test_accuracy": 0.905,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9696000576019287,
|
152 |
+
"llm_top_1_test_accuracy": 0.621,
|
153 |
+
"llm_top_2_test_accuracy": 0.7044,
|
154 |
+
"llm_top_5_test_accuracy": 0.7592,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9676000595092773,
|
160 |
+
"sae_top_1_test_accuracy": 0.643,
|
161 |
+
"sae_top_2_test_accuracy": 0.7186,
|
162 |
+
"sae_top_5_test_accuracy": 0.7884,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9512500613927841,
|
171 |
+
"llm_top_1_test_accuracy": 0.69625,
|
172 |
+
"llm_top_2_test_accuracy": 0.7365,
|
173 |
+
"llm_top_5_test_accuracy": 0.82875,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9520000517368317,
|
179 |
+
"sae_top_1_test_accuracy": 0.81575,
|
180 |
+
"sae_top_2_test_accuracy": 0.8447499999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.90275,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 1.0,
|
190 |
+
"llm_top_1_test_accuracy": 0.7295999999999999,
|
191 |
+
"llm_top_2_test_accuracy": 0.7838,
|
192 |
+
"llm_top_5_test_accuracy": 0.9076000000000001,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9988000273704529,
|
198 |
+
"sae_top_1_test_accuracy": 0.792,
|
199 |
+
"sae_top_2_test_accuracy": 0.8526,
|
200 |
+
"sae_top_5_test_accuracy": 0.9852000000000001,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatroyshkaBatchTopKTrainer_stop_grads_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_3",
|
210 |
+
"sae_lens_version": "5.3.0",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": null
|
237 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_stop_grads_v2_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "e48b5565-271e-4e75-8b28-ab83364892ee",
|
30 |
+
"datetime_epoch_millis": 1737009044580,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9597375441342594,
|
34 |
+
"llm_top_1_test_accuracy": 0.6529874999999999,
|
35 |
+
"llm_top_2_test_accuracy": 0.7199187499999999,
|
36 |
+
"llm_top_5_test_accuracy": 0.78049375,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9520375471562147,
|
44 |
+
"sae_top_1_test_accuracy": 0.7474562499999999,
|
45 |
+
"sae_top_2_test_accuracy": 0.7980625,
|
46 |
+
"sae_top_5_test_accuracy": 0.8693562499999999,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9634000539779664,
|
65 |
+
"sae_top_1_test_accuracy": 0.7824,
|
66 |
+
"sae_top_2_test_accuracy": 0.826,
|
67 |
+
"sae_top_5_test_accuracy": 0.85,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9564000606536865,
|
76 |
+
"llm_top_1_test_accuracy": 0.6716,
|
77 |
+
"llm_top_2_test_accuracy": 0.7292,
|
78 |
+
"llm_top_5_test_accuracy": 0.7602,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9472000479698182,
|
84 |
+
"sae_top_1_test_accuracy": 0.7353999999999999,
|
85 |
+
"sae_top_2_test_accuracy": 0.7543999999999998,
|
86 |
+
"sae_top_5_test_accuracy": 0.8320000000000001,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9332000374794006,
|
95 |
+
"llm_top_1_test_accuracy": 0.6866,
|
96 |
+
"llm_top_2_test_accuracy": 0.7362,
|
97 |
+
"llm_top_5_test_accuracy": 0.7596,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9234000444412231,
|
103 |
+
"sae_top_1_test_accuracy": 0.7289999999999999,
|
104 |
+
"sae_top_2_test_accuracy": 0.7530000000000001,
|
105 |
+
"sae_top_5_test_accuracy": 0.8482,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9140000343322754,
|
114 |
+
"llm_top_1_test_accuracy": 0.603,
|
115 |
+
"llm_top_2_test_accuracy": 0.6449999999999999,
|
116 |
+
"llm_top_5_test_accuracy": 0.6716,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9088000416755676,
|
122 |
+
"sae_top_1_test_accuracy": 0.6204,
|
123 |
+
"sae_top_2_test_accuracy": 0.6860000000000002,
|
124 |
+
"sae_top_5_test_accuracy": 0.8097999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9825000464916229,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9650000333786011,
|
141 |
+
"sae_top_1_test_accuracy": 0.77,
|
142 |
+
"sae_top_2_test_accuracy": 0.845,
|
143 |
+
"sae_top_5_test_accuracy": 0.873,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9740000486373901,
|
152 |
+
"llm_top_1_test_accuracy": 0.6654,
|
153 |
+
"llm_top_2_test_accuracy": 0.6928000000000001,
|
154 |
+
"llm_top_5_test_accuracy": 0.7612,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9660000562667846,
|
160 |
+
"sae_top_1_test_accuracy": 0.6466000000000001,
|
161 |
+
"sae_top_2_test_accuracy": 0.788,
|
162 |
+
"sae_top_5_test_accuracy": 0.8774,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9490000456571579,
|
171 |
+
"llm_top_1_test_accuracy": 0.6375000000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.7637499999999999,
|
173 |
+
"llm_top_5_test_accuracy": 0.82875,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9465000480413437,
|
179 |
+
"sae_top_1_test_accuracy": 0.79925,
|
180 |
+
"sae_top_2_test_accuracy": 0.8205,
|
181 |
+
"sae_top_5_test_accuracy": 0.8852500000000001,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9994000434875489,
|
190 |
+
"llm_top_1_test_accuracy": 0.6442,
|
191 |
+
"llm_top_2_test_accuracy": 0.781,
|
192 |
+
"llm_top_5_test_accuracy": 0.9057999999999999,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9960000514984131,
|
198 |
+
"sae_top_1_test_accuracy": 0.8966,
|
199 |
+
"sae_top_2_test_accuracy": 0.9116,
|
200 |
+
"sae_top_5_test_accuracy": 0.9792,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatroyshkaBatchTopKTrainer_stop_grads_v2_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1",
|
210 |
+
"sae_lens_version": "5.3.0",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9440000653266907,
|
240 |
+
"1": 0.9630000591278076,
|
241 |
+
"2": 0.9510000348091125,
|
242 |
+
"6": 0.987000048160553,
|
243 |
+
"9": 0.9720000624656677
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9510000348091125,
|
247 |
+
"1": 0.9670000672340393,
|
248 |
+
"2": 0.9520000219345093,
|
249 |
+
"6": 0.9930000305175781,
|
250 |
+
"9": 0.984000027179718
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.568,
|
254 |
+
"1": 0.629,
|
255 |
+
"2": 0.679,
|
256 |
+
"6": 0.791,
|
257 |
+
"9": 0.551
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.585,
|
261 |
+
"1": 0.666,
|
262 |
+
"2": 0.673,
|
263 |
+
"6": 0.801,
|
264 |
+
"9": 0.712
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.72,
|
268 |
+
"1": 0.707,
|
269 |
+
"2": 0.764,
|
270 |
+
"6": 0.899,
|
271 |
+
"9": 0.864
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.628,
|
275 |
+
"1": 0.647,
|
276 |
+
"2": 0.897,
|
277 |
+
"6": 0.815,
|
278 |
+
"9": 0.925
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.689,
|
282 |
+
"1": 0.642,
|
283 |
+
"2": 0.9,
|
284 |
+
"6": 0.972,
|
285 |
+
"9": 0.927
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.729,
|
289 |
+
"1": 0.693,
|
290 |
+
"2": 0.901,
|
291 |
+
"6": 0.978,
|
292 |
+
"9": 0.949
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9610000252723694,
|
298 |
+
"13": 0.940000057220459,
|
299 |
+
"14": 0.9480000734329224,
|
300 |
+
"18": 0.9280000329017639,
|
301 |
+
"19": 0.9590000510215759
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9660000205039978,
|
305 |
+
"13": 0.9480000734329224,
|
306 |
+
"14": 0.9620000720024109,
|
307 |
+
"18": 0.9390000700950623,
|
308 |
+
"19": 0.9670000672340393
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.57,
|
312 |
+
"13": 0.67,
|
313 |
+
"14": 0.639,
|
314 |
+
"18": 0.691,
|
315 |
+
"19": 0.788
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.751,
|
319 |
+
"13": 0.721,
|
320 |
+
"14": 0.684,
|
321 |
+
"18": 0.714,
|
322 |
+
"19": 0.776
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.772,
|
326 |
+
"13": 0.748,
|
327 |
+
"14": 0.727,
|
328 |
+
"18": 0.722,
|
329 |
+
"19": 0.832
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.602,
|
333 |
+
"13": 0.687,
|
334 |
+
"14": 0.841,
|
335 |
+
"18": 0.698,
|
336 |
+
"19": 0.849
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.715,
|
340 |
+
"13": 0.679,
|
341 |
+
"14": 0.845,
|
342 |
+
"18": 0.696,
|
343 |
+
"19": 0.837
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.933,
|
347 |
+
"13": 0.796,
|
348 |
+
"14": 0.849,
|
349 |
+
"18": 0.74,
|
350 |
+
"19": 0.842
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9570000171661377,
|
356 |
+
"21": 0.9220000505447388,
|
357 |
+
"22": 0.9020000696182251,
|
358 |
+
"25": 0.956000030040741,
|
359 |
+
"26": 0.8800000548362732
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9610000252723694,
|
363 |
+
"21": 0.9310000538825989,
|
364 |
+
"22": 0.9240000247955322,
|
365 |
+
"25": 0.9570000171661377,
|
366 |
+
"26": 0.893000066280365
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.703,
|
370 |
+
"21": 0.759,
|
371 |
+
"22": 0.647,
|
372 |
+
"25": 0.695,
|
373 |
+
"26": 0.629
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.801,
|
377 |
+
"21": 0.774,
|
378 |
+
"22": 0.66,
|
379 |
+
"25": 0.751,
|
380 |
+
"26": 0.695
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.812,
|
384 |
+
"21": 0.776,
|
385 |
+
"22": 0.725,
|
386 |
+
"25": 0.801,
|
387 |
+
"26": 0.684
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.861,
|
391 |
+
"21": 0.681,
|
392 |
+
"22": 0.62,
|
393 |
+
"25": 0.872,
|
394 |
+
"26": 0.611
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.883,
|
398 |
+
"21": 0.745,
|
399 |
+
"22": 0.631,
|
400 |
+
"25": 0.867,
|
401 |
+
"26": 0.639
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.935,
|
405 |
+
"21": 0.766,
|
406 |
+
"22": 0.884,
|
407 |
+
"25": 0.91,
|
408 |
+
"26": 0.746
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9240000247955322,
|
414 |
+
"2": 0.9290000200271606,
|
415 |
+
"3": 0.9020000696182251,
|
416 |
+
"5": 0.9230000376701355,
|
417 |
+
"6": 0.8660000562667847
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9450000524520874,
|
421 |
+
"2": 0.9280000329017639,
|
422 |
+
"3": 0.9180000424385071,
|
423 |
+
"5": 0.9200000166893005,
|
424 |
+
"6": 0.859000027179718
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.654,
|
428 |
+
"2": 0.599,
|
429 |
+
"3": 0.612,
|
430 |
+
"5": 0.572,
|
431 |
+
"6": 0.578
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.741,
|
435 |
+
"2": 0.63,
|
436 |
+
"3": 0.601,
|
437 |
+
"5": 0.622,
|
438 |
+
"6": 0.631
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.774,
|
442 |
+
"2": 0.63,
|
443 |
+
"3": 0.663,
|
444 |
+
"5": 0.63,
|
445 |
+
"6": 0.661
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.694,
|
449 |
+
"2": 0.631,
|
450 |
+
"3": 0.575,
|
451 |
+
"5": 0.56,
|
452 |
+
"6": 0.642
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.897,
|
456 |
+
"2": 0.657,
|
457 |
+
"3": 0.682,
|
458 |
+
"5": 0.543,
|
459 |
+
"6": 0.651
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.911,
|
463 |
+
"2": 0.866,
|
464 |
+
"3": 0.728,
|
465 |
+
"5": 0.783,
|
466 |
+
"6": 0.761
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9650000333786011,
|
472 |
+
"5.0": 0.9650000333786011
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.9820000529289246,
|
476 |
+
"5.0": 0.9830000400543213
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.672,
|
480 |
+
"5.0": 0.672
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.724,
|
484 |
+
"5.0": 0.724
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.766,
|
488 |
+
"5.0": 0.766
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.77,
|
492 |
+
"5.0": 0.77
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.845,
|
496 |
+
"5.0": 0.845
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.873,
|
500 |
+
"5.0": 0.873
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9590000510215759,
|
506 |
+
"Python": 0.9760000705718994,
|
507 |
+
"HTML": 0.9820000529289246,
|
508 |
+
"Java": 0.9630000591278076,
|
509 |
+
"PHP": 0.9500000476837158
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9650000333786011,
|
513 |
+
"Python": 0.9830000400543213,
|
514 |
+
"HTML": 0.9900000691413879,
|
515 |
+
"Java": 0.9730000495910645,
|
516 |
+
"PHP": 0.9590000510215759
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.67,
|
520 |
+
"Python": 0.634,
|
521 |
+
"HTML": 0.794,
|
522 |
+
"Java": 0.636,
|
523 |
+
"PHP": 0.593
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.671,
|
527 |
+
"Python": 0.682,
|
528 |
+
"HTML": 0.792,
|
529 |
+
"Java": 0.677,
|
530 |
+
"PHP": 0.642
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.752,
|
534 |
+
"Python": 0.721,
|
535 |
+
"HTML": 0.899,
|
536 |
+
"Java": 0.738,
|
537 |
+
"PHP": 0.696
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.641,
|
541 |
+
"Python": 0.628,
|
542 |
+
"HTML": 0.716,
|
543 |
+
"Java": 0.648,
|
544 |
+
"PHP": 0.6
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.863,
|
548 |
+
"Python": 0.669,
|
549 |
+
"HTML": 0.883,
|
550 |
+
"Java": 0.644,
|
551 |
+
"PHP": 0.881
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.885,
|
555 |
+
"Python": 0.924,
|
556 |
+
"HTML": 0.946,
|
557 |
+
"Java": 0.739,
|
558 |
+
"PHP": 0.893
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9310000538825989,
|
564 |
+
"1": 0.9850000739097595,
|
565 |
+
"2": 0.9360000491142273,
|
566 |
+
"3": 0.9340000152587891
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9390000700950623,
|
570 |
+
"1": 0.9890000224113464,
|
571 |
+
"2": 0.9240000247955322,
|
572 |
+
"3": 0.9440000653266907
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.563,
|
576 |
+
"1": 0.67,
|
577 |
+
"2": 0.676,
|
578 |
+
"3": 0.641
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.796,
|
582 |
+
"1": 0.806,
|
583 |
+
"2": 0.701,
|
584 |
+
"3": 0.752
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.821,
|
588 |
+
"1": 0.88,
|
589 |
+
"2": 0.762,
|
590 |
+
"3": 0.852
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.744,
|
594 |
+
"1": 0.933,
|
595 |
+
"2": 0.804,
|
596 |
+
"3": 0.716
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.815,
|
600 |
+
"1": 0.944,
|
601 |
+
"2": 0.807,
|
602 |
+
"3": 0.716
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.853,
|
606 |
+
"1": 0.957,
|
607 |
+
"2": 0.85,
|
608 |
+
"3": 0.881
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.999000072479248,
|
614 |
+
"fr": 0.9950000643730164,
|
615 |
+
"de": 0.9960000514984131,
|
616 |
+
"es": 0.9980000257492065,
|
617 |
+
"nl": 0.9920000433921814
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 0.999000072479248,
|
621 |
+
"fr": 0.999000072479248,
|
622 |
+
"de": 1.0,
|
623 |
+
"es": 1.0,
|
624 |
+
"nl": 0.999000072479248
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.751,
|
628 |
+
"fr": 0.583,
|
629 |
+
"de": 0.755,
|
630 |
+
"es": 0.476,
|
631 |
+
"nl": 0.656
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.836,
|
635 |
+
"fr": 0.588,
|
636 |
+
"de": 0.822,
|
637 |
+
"es": 0.909,
|
638 |
+
"nl": 0.75
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.882,
|
642 |
+
"fr": 0.913,
|
643 |
+
"de": 0.885,
|
644 |
+
"es": 0.982,
|
645 |
+
"nl": 0.867
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.99,
|
649 |
+
"fr": 0.984,
|
650 |
+
"de": 0.862,
|
651 |
+
"es": 0.902,
|
652 |
+
"nl": 0.745
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.998,
|
656 |
+
"fr": 0.988,
|
657 |
+
"de": 0.916,
|
658 |
+
"es": 0.922,
|
659 |
+
"nl": 0.734
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.998,
|
663 |
+
"fr": 0.988,
|
664 |
+
"de": 0.92,
|
665 |
+
"es": 0.992,
|
666 |
+
"nl": 0.998
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_1_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "1f8c3c41-2cb9-4a14-958e-4211f5b01b6d",
|
30 |
+
"datetime_epoch_millis": 1736916187747,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9590812921524048,
|
34 |
+
"llm_top_1_test_accuracy": 0.6620187500000001,
|
35 |
+
"llm_top_2_test_accuracy": 0.7230875,
|
36 |
+
"llm_top_5_test_accuracy": 0.7807875,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9498875483870506,
|
44 |
+
"sae_top_1_test_accuracy": 0.709875,
|
45 |
+
"sae_top_2_test_accuracy": 0.762275,
|
46 |
+
"sae_top_5_test_accuracy": 0.84881875,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9580000519752503,
|
65 |
+
"sae_top_1_test_accuracy": 0.7266,
|
66 |
+
"sae_top_2_test_accuracy": 0.7979999999999999,
|
67 |
+
"sae_top_5_test_accuracy": 0.8446,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9544000506401062,
|
76 |
+
"llm_top_1_test_accuracy": 0.6696000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7368,
|
78 |
+
"llm_top_5_test_accuracy": 0.7632,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9384000420570373,
|
84 |
+
"sae_top_1_test_accuracy": 0.6910000000000001,
|
85 |
+
"sae_top_2_test_accuracy": 0.755,
|
86 |
+
"sae_top_5_test_accuracy": 0.8507999999999999,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9318000555038453,
|
95 |
+
"llm_top_1_test_accuracy": 0.6884,
|
96 |
+
"llm_top_2_test_accuracy": 0.737,
|
97 |
+
"llm_top_5_test_accuracy": 0.7492,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9174000501632691,
|
103 |
+
"sae_top_1_test_accuracy": 0.7922,
|
104 |
+
"sae_top_2_test_accuracy": 0.8071999999999999,
|
105 |
+
"sae_top_5_test_accuracy": 0.8478,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9180000424385071,
|
114 |
+
"llm_top_1_test_accuracy": 0.6088,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466,
|
116 |
+
"llm_top_5_test_accuracy": 0.6822,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9082000494003296,
|
122 |
+
"sae_top_1_test_accuracy": 0.651,
|
123 |
+
"sae_top_2_test_accuracy": 0.732,
|
124 |
+
"sae_top_5_test_accuracy": 0.7934,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9640000462532043,
|
141 |
+
"sae_top_1_test_accuracy": 0.75,
|
142 |
+
"sae_top_2_test_accuracy": 0.764,
|
143 |
+
"sae_top_5_test_accuracy": 0.824,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9672000408172607,
|
152 |
+
"llm_top_1_test_accuracy": 0.6668000000000001,
|
153 |
+
"llm_top_2_test_accuracy": 0.6896,
|
154 |
+
"llm_top_5_test_accuracy": 0.7656000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9658000588417053,
|
160 |
+
"sae_top_1_test_accuracy": 0.6250000000000001,
|
161 |
+
"sae_top_2_test_accuracy": 0.6712,
|
162 |
+
"sae_top_5_test_accuracy": 0.8301999999999999,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9512500464916229,
|
171 |
+
"llm_top_1_test_accuracy": 0.6957500000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.7795000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8245,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9515000581741333,
|
179 |
+
"sae_top_1_test_accuracy": 0.711,
|
180 |
+
"sae_top_2_test_accuracy": 0.74,
|
181 |
+
"sae_top_5_test_accuracy": 0.8227500000000001,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.6512,
|
191 |
+
"llm_top_2_test_accuracy": 0.7838,
|
192 |
+
"llm_top_5_test_accuracy": 0.9048,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9958000302314758,
|
198 |
+
"sae_top_1_test_accuracy": 0.7322,
|
199 |
+
"sae_top_2_test_accuracy": 0.8308,
|
200 |
+
"sae_top_5_test_accuracy": 0.977,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatroyshkaBatchTopKTrainer_temp_1_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": null
|
237 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_1_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_5_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "97bad8af-5cab-49f5-9c71-720843a9daca",
|
30 |
+
"datetime_epoch_millis": 1736916714039,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9590812921524048,
|
34 |
+
"llm_top_1_test_accuracy": 0.6620187500000001,
|
35 |
+
"llm_top_2_test_accuracy": 0.7230875,
|
36 |
+
"llm_top_5_test_accuracy": 0.7807875,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9588375430554151,
|
44 |
+
"sae_top_1_test_accuracy": 0.76404375,
|
45 |
+
"sae_top_2_test_accuracy": 0.80076875,
|
46 |
+
"sae_top_5_test_accuracy": 0.8549937500000001,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9676000356674195,
|
65 |
+
"sae_top_1_test_accuracy": 0.813,
|
66 |
+
"sae_top_2_test_accuracy": 0.8206,
|
67 |
+
"sae_top_5_test_accuracy": 0.8666,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9544000506401062,
|
76 |
+
"llm_top_1_test_accuracy": 0.6696000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7368,
|
78 |
+
"llm_top_5_test_accuracy": 0.7632,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.954200029373169,
|
84 |
+
"sae_top_1_test_accuracy": 0.7527999999999999,
|
85 |
+
"sae_top_2_test_accuracy": 0.7664000000000001,
|
86 |
+
"sae_top_5_test_accuracy": 0.8065999999999999,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9318000555038453,
|
95 |
+
"llm_top_1_test_accuracy": 0.6884,
|
96 |
+
"llm_top_2_test_accuracy": 0.737,
|
97 |
+
"llm_top_5_test_accuracy": 0.7492,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9326000452041626,
|
103 |
+
"sae_top_1_test_accuracy": 0.6666000000000001,
|
104 |
+
"sae_top_2_test_accuracy": 0.7892,
|
105 |
+
"sae_top_5_test_accuracy": 0.8210000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9180000424385071,
|
114 |
+
"llm_top_1_test_accuracy": 0.6088,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466,
|
116 |
+
"llm_top_5_test_accuracy": 0.6822,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9118000388145446,
|
122 |
+
"sae_top_1_test_accuracy": 0.7638,
|
123 |
+
"sae_top_2_test_accuracy": 0.7946,
|
124 |
+
"sae_top_5_test_accuracy": 0.8342,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9770000576972961,
|
141 |
+
"sae_top_1_test_accuracy": 0.692,
|
142 |
+
"sae_top_2_test_accuracy": 0.76,
|
143 |
+
"sae_top_5_test_accuracy": 0.892,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9672000408172607,
|
152 |
+
"llm_top_1_test_accuracy": 0.6668000000000001,
|
153 |
+
"llm_top_2_test_accuracy": 0.6896,
|
154 |
+
"llm_top_5_test_accuracy": 0.7656000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9728000521659851,
|
160 |
+
"sae_top_1_test_accuracy": 0.6848000000000001,
|
161 |
+
"sae_top_2_test_accuracy": 0.7058,
|
162 |
+
"sae_top_5_test_accuracy": 0.7646,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9512500464916229,
|
171 |
+
"llm_top_1_test_accuracy": 0.6957500000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.7795000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8245,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9555000513792038,
|
179 |
+
"sae_top_1_test_accuracy": 0.85475,
|
180 |
+
"sae_top_2_test_accuracy": 0.8687499999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.9137500000000001,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.6512,
|
191 |
+
"llm_top_2_test_accuracy": 0.7838,
|
192 |
+
"llm_top_5_test_accuracy": 0.9048,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9992000341415406,
|
198 |
+
"sae_top_1_test_accuracy": 0.8845999999999998,
|
199 |
+
"sae_top_2_test_accuracy": 0.9007999999999999,
|
200 |
+
"sae_top_5_test_accuracy": 0.9411999999999999,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatroyshkaBatchTopKTrainer_temp_1_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_5",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": null
|
237 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_2_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "12d38318-3938-4f41-96bf-459f9872e7fc",
|
30 |
+
"datetime_epoch_millis": 1736917019039,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9590812921524048,
|
34 |
+
"llm_top_1_test_accuracy": 0.6620187500000001,
|
35 |
+
"llm_top_2_test_accuracy": 0.7230875,
|
36 |
+
"llm_top_5_test_accuracy": 0.7807875,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9571312982589005,
|
44 |
+
"sae_top_1_test_accuracy": 0.7680750000000001,
|
45 |
+
"sae_top_2_test_accuracy": 0.8130749999999999,
|
46 |
+
"sae_top_5_test_accuracy": 0.8787312500000001,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9662000417709351,
|
65 |
+
"sae_top_1_test_accuracy": 0.8225999999999999,
|
66 |
+
"sae_top_2_test_accuracy": 0.8380000000000001,
|
67 |
+
"sae_top_5_test_accuracy": 0.869,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9544000506401062,
|
76 |
+
"llm_top_1_test_accuracy": 0.6696000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7368,
|
78 |
+
"llm_top_5_test_accuracy": 0.7632,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9464000344276429,
|
84 |
+
"sae_top_1_test_accuracy": 0.7142000000000001,
|
85 |
+
"sae_top_2_test_accuracy": 0.7996000000000001,
|
86 |
+
"sae_top_5_test_accuracy": 0.8484,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9318000555038453,
|
95 |
+
"llm_top_1_test_accuracy": 0.6884,
|
96 |
+
"llm_top_2_test_accuracy": 0.737,
|
97 |
+
"llm_top_5_test_accuracy": 0.7492,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9308000445365906,
|
103 |
+
"sae_top_1_test_accuracy": 0.7678,
|
104 |
+
"sae_top_2_test_accuracy": 0.8228,
|
105 |
+
"sae_top_5_test_accuracy": 0.8418000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9180000424385071,
|
114 |
+
"llm_top_1_test_accuracy": 0.6088,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466,
|
116 |
+
"llm_top_5_test_accuracy": 0.6822,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9232000470161438,
|
122 |
+
"sae_top_1_test_accuracy": 0.6744000000000001,
|
123 |
+
"sae_top_2_test_accuracy": 0.7314,
|
124 |
+
"sae_top_5_test_accuracy": 0.8182,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9715000689029694,
|
141 |
+
"sae_top_1_test_accuracy": 0.862,
|
142 |
+
"sae_top_2_test_accuracy": 0.877,
|
143 |
+
"sae_top_5_test_accuracy": 0.921,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9672000408172607,
|
152 |
+
"llm_top_1_test_accuracy": 0.6668000000000001,
|
153 |
+
"llm_top_2_test_accuracy": 0.6896,
|
154 |
+
"llm_top_5_test_accuracy": 0.7656000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9702000498771668,
|
160 |
+
"sae_top_1_test_accuracy": 0.6322000000000001,
|
161 |
+
"sae_top_2_test_accuracy": 0.7020000000000002,
|
162 |
+
"sae_top_5_test_accuracy": 0.8628,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9512500464916229,
|
171 |
+
"llm_top_1_test_accuracy": 0.6957500000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.7795000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8245,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9497500509023666,
|
179 |
+
"sae_top_1_test_accuracy": 0.783,
|
180 |
+
"sae_top_2_test_accuracy": 0.8160000000000001,
|
181 |
+
"sae_top_5_test_accuracy": 0.8862500000000001,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.6512,
|
191 |
+
"llm_top_2_test_accuracy": 0.7838,
|
192 |
+
"llm_top_5_test_accuracy": 0.9048,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9990000486373901,
|
198 |
+
"sae_top_1_test_accuracy": 0.8884000000000001,
|
199 |
+
"sae_top_2_test_accuracy": 0.9178000000000001,
|
200 |
+
"sae_top_5_test_accuracy": 0.9823999999999999,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatroyshkaBatchTopKTrainer_temp_2_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": null
|
237 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_3_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "cf83d9a3-a61b-4cea-a3b6-4b10e7c7e65e",
|
30 |
+
"datetime_epoch_millis": 1736917532444,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9590812921524048,
|
34 |
+
"llm_top_1_test_accuracy": 0.6620187500000001,
|
35 |
+
"llm_top_2_test_accuracy": 0.7230875,
|
36 |
+
"llm_top_5_test_accuracy": 0.7807875,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9520562894642353,
|
44 |
+
"sae_top_1_test_accuracy": 0.7387687499999999,
|
45 |
+
"sae_top_2_test_accuracy": 0.7993812499999999,
|
46 |
+
"sae_top_5_test_accuracy": 0.87731875,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9598000407218933,
|
65 |
+
"sae_top_1_test_accuracy": 0.7684000000000001,
|
66 |
+
"sae_top_2_test_accuracy": 0.8154,
|
67 |
+
"sae_top_5_test_accuracy": 0.8732000000000001,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9544000506401062,
|
76 |
+
"llm_top_1_test_accuracy": 0.6696000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7368,
|
78 |
+
"llm_top_5_test_accuracy": 0.7632,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9446000456809998,
|
84 |
+
"sae_top_1_test_accuracy": 0.7329999999999999,
|
85 |
+
"sae_top_2_test_accuracy": 0.7953999999999999,
|
86 |
+
"sae_top_5_test_accuracy": 0.8352,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9318000555038453,
|
95 |
+
"llm_top_1_test_accuracy": 0.6884,
|
96 |
+
"llm_top_2_test_accuracy": 0.737,
|
97 |
+
"llm_top_5_test_accuracy": 0.7492,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9206000447273255,
|
103 |
+
"sae_top_1_test_accuracy": 0.6492,
|
104 |
+
"sae_top_2_test_accuracy": 0.8148,
|
105 |
+
"sae_top_5_test_accuracy": 0.8558000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9180000424385071,
|
114 |
+
"llm_top_1_test_accuracy": 0.6088,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466,
|
116 |
+
"llm_top_5_test_accuracy": 0.6822,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9146000504493713,
|
122 |
+
"sae_top_1_test_accuracy": 0.7203999999999999,
|
123 |
+
"sae_top_2_test_accuracy": 0.7226000000000001,
|
124 |
+
"sae_top_5_test_accuracy": 0.8253999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9670000374317169,
|
141 |
+
"sae_top_1_test_accuracy": 0.757,
|
142 |
+
"sae_top_2_test_accuracy": 0.835,
|
143 |
+
"sae_top_5_test_accuracy": 0.896,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9672000408172607,
|
152 |
+
"llm_top_1_test_accuracy": 0.6668000000000001,
|
153 |
+
"llm_top_2_test_accuracy": 0.6896,
|
154 |
+
"llm_top_5_test_accuracy": 0.7656000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9662000417709351,
|
160 |
+
"sae_top_1_test_accuracy": 0.6409999999999999,
|
161 |
+
"sae_top_2_test_accuracy": 0.6952,
|
162 |
+
"sae_top_5_test_accuracy": 0.8674,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9512500464916229,
|
171 |
+
"llm_top_1_test_accuracy": 0.6957500000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.7795000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8245,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9452500343322754,
|
179 |
+
"sae_top_1_test_accuracy": 0.76275,
|
180 |
+
"sae_top_2_test_accuracy": 0.79425,
|
181 |
+
"sae_top_5_test_accuracy": 0.88375,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.6512,
|
191 |
+
"llm_top_2_test_accuracy": 0.7838,
|
192 |
+
"llm_top_5_test_accuracy": 0.9048,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9984000205993653,
|
198 |
+
"sae_top_1_test_accuracy": 0.8783999999999998,
|
199 |
+
"sae_top_2_test_accuracy": 0.9224,
|
200 |
+
"sae_top_5_test_accuracy": 0.9818,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatroyshkaBatchTopKTrainer_temp_3_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": null
|
237 |
+
}
|
sparse_probing/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_temp_3_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_4_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 32,
|
20 |
+
"llm_dtype": "bfloat16",
|
21 |
+
"model_name": "gemma-2-2b",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "e74f1dbe-5ae5-4133-ba70-87a1240169fb",
|
30 |
+
"datetime_epoch_millis": 1736917840142,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9590812921524048,
|
34 |
+
"llm_top_1_test_accuracy": 0.6620187500000001,
|
35 |
+
"llm_top_2_test_accuracy": 0.7230875,
|
36 |
+
"llm_top_5_test_accuracy": 0.7807875,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9579125415533781,
|
44 |
+
"sae_top_1_test_accuracy": 0.7661937499999999,
|
45 |
+
"sae_top_2_test_accuracy": 0.8245812499999999,
|
46 |
+
"sae_top_5_test_accuracy": 0.86499375,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9694000363349915,
|
57 |
+
"llm_top_1_test_accuracy": 0.6436000000000001,
|
58 |
+
"llm_top_2_test_accuracy": 0.6874,
|
59 |
+
"llm_top_5_test_accuracy": 0.7908,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9680000424385071,
|
65 |
+
"sae_top_1_test_accuracy": 0.7802,
|
66 |
+
"sae_top_2_test_accuracy": 0.8469999999999999,
|
67 |
+
"sae_top_5_test_accuracy": 0.8695999999999999,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9544000506401062,
|
76 |
+
"llm_top_1_test_accuracy": 0.6696000000000001,
|
77 |
+
"llm_top_2_test_accuracy": 0.7368,
|
78 |
+
"llm_top_5_test_accuracy": 0.7632,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9536000490188599,
|
84 |
+
"sae_top_1_test_accuracy": 0.7604,
|
85 |
+
"sae_top_2_test_accuracy": 0.8029999999999999,
|
86 |
+
"sae_top_5_test_accuracy": 0.8266,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9318000555038453,
|
95 |
+
"llm_top_1_test_accuracy": 0.6884,
|
96 |
+
"llm_top_2_test_accuracy": 0.737,
|
97 |
+
"llm_top_5_test_accuracy": 0.7492,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9284000396728516,
|
103 |
+
"sae_top_1_test_accuracy": 0.7369999999999999,
|
104 |
+
"sae_top_2_test_accuracy": 0.7496,
|
105 |
+
"sae_top_5_test_accuracy": 0.8140000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.9180000424385071,
|
114 |
+
"llm_top_1_test_accuracy": 0.6088,
|
115 |
+
"llm_top_2_test_accuracy": 0.6466,
|
116 |
+
"llm_top_5_test_accuracy": 0.6822,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.9178000450134277,
|
122 |
+
"sae_top_1_test_accuracy": 0.7112,
|
123 |
+
"sae_top_2_test_accuracy": 0.7686,
|
124 |
+
"sae_top_5_test_accuracy": 0.7902000000000001,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.9810000360012054,
|
133 |
+
"llm_top_1_test_accuracy": 0.672,
|
134 |
+
"llm_top_2_test_accuracy": 0.724,
|
135 |
+
"llm_top_5_test_accuracy": 0.766,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9750000536441803,
|
141 |
+
"sae_top_1_test_accuracy": 0.718,
|
142 |
+
"sae_top_2_test_accuracy": 0.87,
|
143 |
+
"sae_top_5_test_accuracy": 0.951,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9672000408172607,
|
152 |
+
"llm_top_1_test_accuracy": 0.6668000000000001,
|
153 |
+
"llm_top_2_test_accuracy": 0.6896,
|
154 |
+
"llm_top_5_test_accuracy": 0.7656000000000001,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9672000408172607,
|
160 |
+
"sae_top_1_test_accuracy": 0.6266,
|
161 |
+
"sae_top_2_test_accuracy": 0.7336,
|
162 |
+
"sae_top_5_test_accuracy": 0.8114000000000001,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9512500464916229,
|
171 |
+
"llm_top_1_test_accuracy": 0.6957500000000001,
|
172 |
+
"llm_top_2_test_accuracy": 0.7795000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8245,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.953500047326088,
|
179 |
+
"sae_top_1_test_accuracy": 0.85675,
|
180 |
+
"sae_top_2_test_accuracy": 0.88625,
|
181 |
+
"sae_top_5_test_accuracy": 0.90575,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9996000289916992,
|
190 |
+
"llm_top_1_test_accuracy": 0.6512,
|
191 |
+
"llm_top_2_test_accuracy": 0.7838,
|
192 |
+
"llm_top_5_test_accuracy": 0.9048,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9998000144958497,
|
198 |
+
"sae_top_1_test_accuracy": 0.9394,
|
199 |
+
"sae_top_2_test_accuracy": 0.9385999999999999,
|
200 |
+
"sae_top_5_test_accuracy": 0.9513999999999999,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatroyshkaBatchTopKTrainer_temp_3_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_4",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "gemma-2-2b",
|
213 |
+
"d_in": 2304,
|
214 |
+
"d_sae": 16384,
|
215 |
+
"hook_layer": 12,
|
216 |
+
"hook_name": "blocks.12.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "matryoshka_batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "bfloat16",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": -100000,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": null
|
237 |
+
}
|
unlearning/matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "f1ecdaa3-96d7-4052-b690-47d280d271dc",
|
37 |
+
"datetime_epoch_millis": 1737732426670,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.0731707215309143
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "f2d1d982515d2dee706eb23a1ca459b308988764",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_0",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "97f90ad3-4755-4c59-a62f-84a751c0b398",
|
37 |
+
"datetime_epoch_millis": 1737733934907,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.08255159854888916
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "f2d1d982515d2dee706eb23a1ca459b308988764",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_3",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_4_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "658acd5d-8eb7-4a70-9113-c298a84a841e",
|
37 |
+
"datetime_epoch_millis": 1737734452042,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.06003749370574951
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "f2d1d982515d2dee706eb23a1ca459b308988764",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matryoshka_0121_MatryoshkaBatchTopKTrainer_gemma_batch_topk_65k_google_gemma-2-2b_batch_top_k_resid_post_layer_12_trainer_4",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "bc713851-ff13-416f-9f69-449508da6850",
|
37 |
+
"datetime_epoch_millis": 1736902388347,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.11069416999816895
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_0",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "b63d33ce-a5bb-4a88-ad6a-0cb118fdd6cb",
|
37 |
+
"datetime_epoch_millis": 1736905975781,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.1538461446762085
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_1",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "3be23775-2657-419e-a96b-372525261c92",
|
37 |
+
"datetime_epoch_millis": 1736903822805,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.13508445024490356
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_2",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "c1db810e-e429-489e-8738-f30e00b3fa33",
|
37 |
+
"datetime_epoch_millis": 1736903106728,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.06003749370574951
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_3",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_5_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "3fd70108-37cf-419c-a036-50023c285177",
|
37 |
+
"datetime_epoch_millis": 1736905259644,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.04315197467803955
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "ec5efa820ceb6e88d53667f247bb2a09efca609f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_BatchTopKTrainer_baseline_google_gemma-2-2b_ctx1024_0114_resid_post_layer_12_trainer_5",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "b166d976-48b5-4646-8b59-69bf7bf6f972",
|
37 |
+
"datetime_epoch_millis": 1737163136376,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.06378984451293945
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0",
|
47 |
+
"sae_lens_version": "5.3.1",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "49309dcc-6978-466a-8a1e-d927b47e2b25",
|
37 |
+
"datetime_epoch_millis": 1737161760147,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.06566601991653442
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1",
|
47 |
+
"sae_lens_version": "5.3.1",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "87fe1ef5-446c-46e8-8d1b-6a791a376122",
|
37 |
+
"datetime_epoch_millis": 1737160352612,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.04690432548522949
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_2",
|
47 |
+
"sae_lens_version": "5.3.1",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "adf9820b-d272-4a42-a909-f039687c1c0e",
|
37 |
+
"datetime_epoch_millis": 1737161063768,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.056285202503204346
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3",
|
47 |
+
"sae_lens_version": "5.3.1",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_5_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "9994714c-941e-41a3-aa7b-94c3db42c606",
|
37 |
+
"datetime_epoch_millis": 1737162450586,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.013133227825164795
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_10_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_5",
|
47 |
+
"sae_lens_version": "5.3.1",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "5d408c21-5d92-40bf-82e6-08801842a9ee",
|
37 |
+
"datetime_epoch_millis": 1737162693795,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.11257034540176392
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "9d0910d5-ddb4-4364-9985-0506ce3f1aa0",
|
37 |
+
"datetime_epoch_millis": 1737164811504,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.018761754035949707
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_3",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "d5f5a2c4-faa4-4727-bbdc-3b3133be257a",
|
37 |
+
"datetime_epoch_millis": 1737165530830,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.06191366910934448
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_5_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "cd98a95a-67c9-4466-856a-c2074e993669",
|
37 |
+
"datetime_epoch_millis": 1737166241725,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.009380877017974854
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "temp_MatryoshkaBatchTopKTrainer_3_fixed_groups_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_5",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 16384,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "883c19c0-1d9c-47da-9106-a27231286718",
|
37 |
+
"datetime_epoch_millis": 1737418380229,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.056285202503204346
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_0",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "9aa4d002-2636-4b8e-8bb4-5f576facd7af",
|
37 |
+
"datetime_epoch_millis": 1737419005224,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.0863039493560791
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "535633b1-27a5-4498-8f12-08446822bb98",
|
37 |
+
"datetime_epoch_millis": 1737419626705,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.058161377906799316
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "433ea6ba-5cc7-4c97-9867-4e89e21dab34",
|
37 |
+
"datetime_epoch_millis": 1737420249435,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.09756100177764893
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_10_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "f4602eb0-a73f-417e-9fc0-0914bdcfdd05",
|
37 |
+
"datetime_epoch_millis": 1737421495916,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.1388368010520935
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_0",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "386b85c5-b5b8-4b23-a8dd-ced08c820565",
|
37 |
+
"datetime_epoch_millis": 1737422126405,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.11257034540176392
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "74ba8085-31ba-4319-afcd-c567c7e4a8eb",
|
37 |
+
"datetime_epoch_millis": 1737422756786,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.06566601991653442
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "17f808bb-35ed-4d05-92ce-e7d3be5dea9c",
|
37 |
+
"datetime_epoch_millis": 1737423379856,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.03564727306365967
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_3_fixed_groups__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_3",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "bda053f4-8675-4a82-b1c5-6261a7665deb",
|
37 |
+
"datetime_epoch_millis": 1737182977983,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.09193247556686401
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_0",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "edcb1728-b3e4-4f18-906a-c0d83ae8320e",
|
37 |
+
"datetime_epoch_millis": 1737183735421,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.18574106693267822
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_1",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "a3bfc03e-6380-47af-ac40-f9aea8b3cc4b",
|
37 |
+
"datetime_epoch_millis": 1737186005511,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.08442777395248413
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "e2b0b3c57a3d256998f8bda15cdb21542f226d1a",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1000_google_gemma-2-2b_ctx1024_0117_resid_post_layer_12_trainer_4",
|
47 |
+
"sae_lens_version": "5.3.0",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "83d0e45b-5214-4b48-a664-50bfecabed35",
|
37 |
+
"datetime_epoch_millis": 1737405407596,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.14258909225463867
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_1",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "3b45e39a-43e7-4c61-bf4b-5c302718c0a0",
|
37 |
+
"datetime_epoch_millis": 1737406106512,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.08255159854888916
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_2",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|
unlearning/matryoshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_4_custom_sae_eval_results.json
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"eval_type_id": "unlearning",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"wmdp-bio",
|
7 |
+
"high_school_us_history",
|
8 |
+
"college_computer_science",
|
9 |
+
"high_school_geography",
|
10 |
+
"human_aging"
|
11 |
+
],
|
12 |
+
"intervention_method": "clamp_feature_activation",
|
13 |
+
"retain_thresholds": [
|
14 |
+
0.001,
|
15 |
+
0.01
|
16 |
+
],
|
17 |
+
"n_features_list": [
|
18 |
+
10,
|
19 |
+
20
|
20 |
+
],
|
21 |
+
"multipliers": [
|
22 |
+
25,
|
23 |
+
50,
|
24 |
+
100,
|
25 |
+
200
|
26 |
+
],
|
27 |
+
"dataset_size": 1024,
|
28 |
+
"seq_len": 1024,
|
29 |
+
"n_batch_loss_added": 50,
|
30 |
+
"target_metric": "correct",
|
31 |
+
"save_metrics": true,
|
32 |
+
"model_name": "gemma-2-2b-it",
|
33 |
+
"llm_batch_size": 4,
|
34 |
+
"llm_dtype": "bfloat16"
|
35 |
+
},
|
36 |
+
"eval_id": "ebdb0c6c-fcf9-4b36-a5bf-0b8ff7ac8d7e",
|
37 |
+
"datetime_epoch_millis": 1737407505840,
|
38 |
+
"eval_result_metrics": {
|
39 |
+
"unlearning": {
|
40 |
+
"unlearning_score": 0.03939962387084961
|
41 |
+
}
|
42 |
+
},
|
43 |
+
"eval_result_details": [],
|
44 |
+
"sae_bench_commit_hash": "a0fb5e90a82a0414ca9be0511ec3df44af433f2f",
|
45 |
+
"sae_lens_id": "custom_sae",
|
46 |
+
"sae_lens_release_id": "matroyshka_gemma-2-2b-16k-v2_MatryoshkaBatchTopKTrainer_65k_temp1__google_gemma-2-2b_matryoshka_batch_top_k_resid_post_layer_12_trainer_4",
|
47 |
+
"sae_lens_version": "5.3.2",
|
48 |
+
"sae_cfg_dict": {
|
49 |
+
"model_name": "gemma-2-2b",
|
50 |
+
"d_in": 2304,
|
51 |
+
"d_sae": 65536,
|
52 |
+
"hook_layer": 12,
|
53 |
+
"hook_name": "blocks.12.hook_resid_post",
|
54 |
+
"context_size": null,
|
55 |
+
"hook_head_index": null,
|
56 |
+
"architecture": "matryoshka_batch_topk",
|
57 |
+
"apply_b_dec_to_input": null,
|
58 |
+
"finetuning_scaling_factor": null,
|
59 |
+
"activation_fn_str": "",
|
60 |
+
"prepend_bos": true,
|
61 |
+
"normalize_activations": "none",
|
62 |
+
"dtype": "bfloat16",
|
63 |
+
"device": "",
|
64 |
+
"dataset_path": "",
|
65 |
+
"dataset_trust_remote_code": true,
|
66 |
+
"seqpos_slice": [
|
67 |
+
null
|
68 |
+
],
|
69 |
+
"training_tokens": -100000,
|
70 |
+
"sae_lens_training_version": null,
|
71 |
+
"neuronpedia_id": null
|
72 |
+
},
|
73 |
+
"eval_result_unstructured": null
|
74 |
+
}
|