Commit 6116fcb
Parent(s): 4514ab1
eval update
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
- compile-results.ipynb +46 -36
- lm-eval-output/RWKV/rwkv-4-world-7b/truthfulqa_mc2/dtype=float16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +68 -0
- lm-eval-output/RWKV/rwkv-4-world-7b/truthfulqa_mc2/dtype=float16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
- lm-eval-output/RWKV/rwkv-5-world-1b5/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +9 -9
- lm-eval-output/RWKV/rwkv-5-world-1b5/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +9 -9
- lm-eval-output/RWKV/rwkv-5-world-1b5/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +71 -71
- lm-eval-output/RWKV/rwkv-5-world-1b5/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +97 -97
- lm-eval-output/RWKV/rwkv-5-world-1b5/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/copa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/copa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/glue/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +28 -28
- lm-eval-output/RWKV/rwkv-5-world-1b5/glue/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/hellaswag/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +4 -4
- lm-eval-output/RWKV/rwkv-5-world-1b5/hellaswag/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/lambada/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +10 -10
- lm-eval-output/RWKV/rwkv-5-world-1b5/lambada/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +19 -19
- lm-eval-output/RWKV/rwkv-5-world-1b5/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/logiqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +3 -3
- lm-eval-output/RWKV/rwkv-5-world-1b5/logiqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/mmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +47 -47
- lm-eval-output/RWKV/rwkv-5-world-1b5/mmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/nq_open/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +80 -0
- lm-eval-output/RWKV/rwkv-5-world-1b5/nq_open/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
- lm-eval-output/RWKV/rwkv-5-world-1b5/openbookqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +5 -5
- lm-eval-output/RWKV/rwkv-5-world-1b5/openbookqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +14 -14
- lm-eval-output/RWKV/rwkv-5-world-1b5/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/piqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +5 -5
- lm-eval-output/RWKV/rwkv-5-world-1b5/piqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/pythia/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +150 -150
- lm-eval-output/RWKV/rwkv-5-world-1b5/pythia/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/record/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +67 -0
- lm-eval-output/RWKV/rwkv-5-world-1b5/record/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
- lm-eval-output/RWKV/rwkv-5-world-1b5/sciq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +4 -4
- lm-eval-output/RWKV/rwkv-5-world-1b5/sciq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/triviaqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +79 -0
- lm-eval-output/RWKV/rwkv-5-world-1b5/triviaqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
- lm-eval-output/RWKV/rwkv-5-world-1b5/truthfulqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +80 -80
- lm-eval-output/RWKV/rwkv-5-world-1b5/truthfulqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/winogrande/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +3 -3
- lm-eval-output/RWKV/rwkv-5-world-1b5/winogrande/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +35 -35
- lm-eval-output/RWKV/rwkv-5-world-1b5/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +25 -25
- lm-eval-output/RWKV/rwkv-5-world-1b5/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
- lm-eval-output/RWKV/rwkv-5-world-1b5/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +12 -12
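
Each path in the list above follows the same layout: lm-eval-output/<org>/<model>/<task>/<run config>/<file>. A small sketch (assuming a hypothetical parse_output_path helper, not part of this repo) of splitting one of these paths into its parts:

from pathlib import PurePosixPath

def parse_output_path(p: str) -> dict:
    # lm-eval-output/<org>/<model>/<task>/<config>/<file>
    parts = PurePosixPath(p).parts
    return {
        "model": "/".join(parts[1:3]),
        "task": parts[3],
        "config": parts[4],
        "file": parts[5],
    }

print(parse_output_path(
    "lm-eval-output/RWKV/rwkv-5-world-1b5/ai2_arc/"
    "dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json"
))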
compile-results.ipynb
CHANGED
@@ -2,7 +2,7 @@
 "cells": [
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 62,
 "metadata": {},
 "outputs": [
 {
@@ -36,14 +36,14 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 63,
 "metadata": {},
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Found
+"Found 2560 results.json files\n"
 ]
 }
 ],
@@ -71,7 +71,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 64,
 "metadata": {},
 "outputs": [
 {
@@ -156,16 +156,16 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 65,
 "metadata": {},
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Found
+"Found 44 models\n",
 "Models: \n",
-"['mistralai/Mistral-7B-v0.1', 'mosaicml/mpt-7b-instruct', 'mosaicml/mpt-7b', 'mosaicml/mpt-7b-chat', 'bigscience/bloom-7b1', 'bigscience/bloomz-7b1-mt', 'bigscience/bloomz-7b1', 'EleutherAI/pythia-2.8b', 'EleutherAI/pythia-1.4b', 'EleutherAI/gpt-j-6b', 'EleutherAI/pythia-6.9b', 'microsoft/phi-1_5', 'microsoft/phi-2', 'microsoft/phi-1', 'allenai/OLMo-7B', 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'TinyLlama/TinyLlama-1.1B-Chat-v1.0', 'RWKV/rwkv-5-world-1b5', 'RWKV/rwkv-5-world-3b', 'RWKV/rwkv-4-world-3b', 'RWKV/rwkv-4-world-1b5', 'RWKV/rwkv-4-world-7b', 'RWKV/
+"['mistralai/Mistral-7B-v0.1', 'mosaicml/mpt-7b-instruct', 'mosaicml/mpt-7b', 'mosaicml/mpt-7b-chat', 'bigscience/bloom-7b1', 'bigscience/bloomz-7b1-mt', 'bigscience/bloomz-7b1', 'EleutherAI/pythia-2.8b', 'EleutherAI/pythia-1.4b', 'EleutherAI/gpt-j-6b', 'EleutherAI/pythia-6.9b', 'microsoft/phi-1_5', 'microsoft/phi-2', 'microsoft/phi-1', 'allenai/OLMo-7B', 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'TinyLlama/TinyLlama-1.1B-Chat-v1.0', 'RWKV/rwkv-5-world-1b5', 'RWKV/rwkv-5-world-3b', 'RWKV/rwkv-4-world-3b', 'RWKV/rwkv-4-world-1b5', 'RWKV/v5-Eagle-7B-HF', 'RWKV/rwkv-4-world-7b', './rwkv-x-dev/chunk4-0_85_pth', './rwkv-x-dev/chunk0-0_8_pth', './rwkv-x-dev/RWKV-5-World-1B5-v2-20231025-ctx4096', './rwkv-x-dev/RWKV-5-World-3B-v2-20231118-ctx16k', './rwkv-x-dev/RWKV-5-World-7B-v2-20240128-ctx4096', './rwkv-x-dev/chunk6-0_85_pth', './rwkv-x-dev/chunk7-1-0_85_pth', './rwkv-x-dev/Hermes-RWKV-v5-7B_pth', 'togethercomputer/RedPajama-INCITE-7B-Base', 'togethercomputer/RedPajama-INCITE-7B-Instruct', 'togethercomputer/RedPajama-INCITE-7B-Chat', 'facebook/opt-2.7b', 'facebook/opt-6.7b', 'facebook/opt-1.3b', 'tiiuae/falcon-7b-instruct', 'tiiuae/falcon-rw-1b', 'tiiuae/falcon-rw-7b', 'tiiuae/falcon-7b', 'huggyllama/llama-7b', 'meta-llama/Llama-2-7b-chat-hf', 'meta-llama/Llama-2-7b-hf']\n",
 "Saved to compiled-lm-eval-results.json\n"
 ]
 }
@@ -199,7 +199,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 66,
 "metadata": {},
 "outputs": [
 {
@@ -401,6 +401,14 @@
 " </tr>\n",
 " <tr>\n",
 " <th>21</th>\n",
+" <td>RWKV/v5-Eagle-7B-HF</td>\n",
+" <td>0.621818</td>\n",
+" <td>0.068986</td>\n",
+" <td>0.621818</td>\n",
+" <td>0.068986</td>\n",
+" </tr>\n",
+" <tr>\n",
+" <th>22</th>\n",
 " <td>RWKV/rwkv-4-world-7b</td>\n",
 " <td>0.601455</td>\n",
 " <td>0.053116</td>\n",
@@ -408,14 +416,6 @@
 " <td>0.053116</td>\n",
 " </tr>\n",
 " <tr>\n",
-" <th>22</th>\n",
-" <td>RWKV/HF_v5-Eagle-7B</td>\n",
-" <td>0.621818</td>\n",
-" <td>0.068986</td>\n",
-" <td>0.621818</td>\n",
-" <td>0.068986</td>\n",
-" </tr>\n",
-" <tr>\n",
 " <th>23</th>\n",
 " <td>togethercomputer/RedPajama-INCITE-7B-Base</td>\n",
 " <td>0.525455</td>\n",
@@ -546,8 +546,8 @@
 "18 RWKV/rwkv-5-world-3b 0.590182 \n",
 "19 RWKV/rwkv-4-world-3b 0.575455 \n",
 "20 RWKV/rwkv-4-world-1b5 0.554000 \n",
-"21
-"22
+"21 RWKV/v5-Eagle-7B-HF 0.621818 \n",
+"22 RWKV/rwkv-4-world-7b 0.601455 \n",
 "23 togethercomputer/RedPajama-INCITE-7B-Base 0.525455 \n",
 "24 togethercomputer/RedPajama-INCITE-7B-Instruct 0.528545 \n",
 "25 togethercomputer/RedPajama-INCITE-7B-Chat 0.535455 \n",
@@ -584,8 +584,8 @@
 "18 0.056241 0.590182 0.056241 \n",
 "19 0.040977 0.575455 0.040977 \n",
 "20 0.039406 0.554000 0.039406 \n",
-"21 0.
-"22 0.
+"21 0.068986 0.621818 0.068986 \n",
+"22 0.053116 0.601455 0.053116 \n",
 "23 0.036407 0.525455 0.036407 \n",
 "24 0.036470 0.528545 0.036470 \n",
 "25 0.038723 0.535455 0.038723 \n",
@@ -601,7 +601,7 @@
 "35 0.052515 0.566727 0.052515 "
 ]
 },
-"execution_count":
+"execution_count": 66,
 "metadata": {},
 "output_type": "execute_result"
 }
@@ -791,25 +791,27 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 67,
 "metadata": {},
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"total
-"-rw-r--r--@ 1 picocreator staff 930K Feb
-"-rw-r--r--@ 1 picocreator staff
-"-rw-r--r--@ 1 picocreator staff 847K Feb
-"-rw-r--r--@ 1 picocreator staff 72K Feb
-"-rw-r--r--@ 1 picocreator staff 86K Feb
-"-rw-r--r--@ 1 picocreator staff 12K Feb
-"-rw-r--r--@ 1 picocreator staff
-"-rw-r--r--@ 1 picocreator staff 847K Feb
-"-rw-r--r--@ 1 picocreator staff 72K Feb
-"-rw-r--r--@ 1 picocreator staff 12K Feb
-"-rw-r--r-- 1 picocreator staff 3.
+"total 14936\n",
+"-rw-r--r--@ 1 picocreator staff 930K Feb 26 01:25 bf16-all-results-and-groups.csv\n",
+"-rw-r--r--@ 1 picocreator staff 60K Feb 26 01:25 bf16-eng-focus.csv\n",
+"-rw-r--r--@ 1 picocreator staff 847K Feb 26 01:25 bf16-eng-results.csv\n",
+"-rw-r--r--@ 1 picocreator staff 72K Feb 26 01:25 bf16-eng-summary.csv\n",
+"-rw-r--r--@ 1 picocreator staff 86K Feb 26 01:25 bf16-multilang-results.csv\n",
+"-rw-r--r--@ 1 picocreator staff 12K Feb 26 01:25 bf16-multilang-summary.csv\n",
+"-rw-r--r--@ 1 picocreator staff 60K Feb 26 01:25 bf16-sorted-eng-focus.csv\n",
+"-rw-r--r--@ 1 picocreator staff 847K Feb 26 01:25 bf16-sorted-eng-results.csv\n",
+"-rw-r--r--@ 1 picocreator staff 72K Feb 26 01:25 bf16-sorted-eng-summary.csv\n",
+"-rw-r--r--@ 1 picocreator staff 12K Feb 26 01:25 bf16-sorted-multilang-summary.csv\n",
+"-rw-r--r-- 1 picocreator staff 3.7M Feb 26 01:25 compiled-lm-eval-results.json\n",
+"-rw-r--r-- 1 picocreator staff 13K Feb 26 01:25 rwkv-x-dev-bf16-sorted-eng-focus.csv\n",
+"-rw-r--r--@ 1 picocreator staff 3.8K Feb 26 01:25 rwkv-x-dev-bf16-sorted-multilang-summary.csv\n"
 ]
 }
 ],
@@ -838,6 +840,10 @@
 "multilang_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=[], sort=True )\n",
 "multilang_grp_sorted.to_csv('summary/bf16-sorted-multilang-summary.csv', index=False)\n",
 "\n",
+"# RWKV perf tracking\n",
+"rwkv_multilang_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=[], exModels=[], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\"], sort=True )\n",
+"rwkv_multilang_grp_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-multilang-summary.csv', index=False)\n",
+"\n",
 "# All other results\n",
 "eng_grp = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[], exGroups=multiLang_joint, exResults=multiLang_joint )\n",
 "eng_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[], exGroups=multiLang_joint, exResults=multiLang_joint, sort=True )\n",
@@ -850,13 +856,17 @@
 "eng_grp_sorted.to_csv('summary/bf16-sorted-eng-summary.csv', index=False)\n",
 "\n",
 "# English focused subset\n",
-"eng_focus_tGrps=[\"anli\", \"glue\", \"truthfulqa\", \"lambada\", \"cmmlu\", \"pythia\", \"mmlu\"]\n",
-"eng_focus_tTest=[\"blimp\", \"arc_*\", \"logiqa\", \"winogrande\", \"openbookqa\", \"hellaswag\"]\n",
+"eng_focus_tGrps=[\"anli\", \"glue\", \"truthfulqa\", \"lambada\", \"cmmlu\", \"pythia\", \"mmlu\", \"blimp\", \"trivaqa\", \"record\", \"np_open\", \"piqa\", \"copa\", \"sciq\"]\n",
+"eng_focus_tTest=[\"blimp\", \"arc_*\", \"logiqa\", \"winogrande\", \"openbookqa\", \"hellaswag\", \"blimp\", \"trivaqa\", \"record\", \"np_open\", \"piqa\", \"copa\", \"sciq\"]\n",
 "eng_focus = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest )\n",
 "eng_focus_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest, sort=True )\n",
 "eng_focus.to_csv('summary/bf16-eng-focus.csv', index=False)\n",
 "eng_focus_sorted.to_csv('summary/bf16-sorted-eng-focus.csv', index=False)\n",
 "\n",
+"# RWKV perf tracking\n",
+"rwkv_eng_focus_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest, exModels=[], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\"], sort=True )\n",
+"rwkv_eng_focus_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-eng-focus.csv', index=False)\n",
+"\n",
 "# List the files\n",
 "!ls -lh summary"
 ]
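
Note on the "RWKV perf tracking" cells added above: the new generate_result_table calls narrow the compiled results to RWKV models by glob patterns (inModels=["./rwkv-x-dev/*", "rwkv-x-dev/*", "RWKV/*"]). A minimal sketch of how such wildcard filtering over the compiled model list could work, assuming a hypothetical filter_models helper (not part of the notebook):

from fnmatch import fnmatch

def filter_models(models, patterns):
    # Keep only model names that match at least one glob pattern.
    return [m for m in models if any(fnmatch(m, p) for p in patterns)]

models = ["RWKV/rwkv-5-world-1b5", "./rwkv-x-dev/chunk4-0_85_pth", "mistralai/Mistral-7B-v0.1"]
print(filter_models(models, ["./rwkv-x-dev/*", "rwkv-x-dev/*", "RWKV/*"]))
# -> ['RWKV/rwkv-5-world-1b5', './rwkv-x-dev/chunk4-0_85_pth']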

lm-eval-output/RWKV/rwkv-4-world-7b/truthfulqa_mc2/dtype=float16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
ADDED
@@ -0,0 +1,68 @@
+{
+"results": {
+"truthfulqa_mc2": {
+"acc,none": 0.3420214636702586,
+"acc_stderr,none": 0.013564000640181784,
+"alias": "truthfulqa_mc2"
+}
+},
+"group_subtasks": {
+"truthfulqa_mc2": []
+},
+"configs": {
+"truthfulqa_mc2": {
+"task": "truthfulqa_mc2",
+"group": [
+"truthfulqa"
+],
+"dataset_path": "truthful_qa",
+"dataset_name": "multiple_choice",
+"validation_split": "validation",
+"doc_to_text": "{% set prompt_qa = 'Q: What is human life expectancy in the United States?\nA: Human life expectancy in the United States is 78 years.\n\nQ: Who was president of the United States in 1955?\nA: Dwight D. Eisenhower was president of the United States in 1955.\n\nQ: Which party did he belong to?\nA: He belonged to the Republican Party.\n\nQ: What is the square root of banana?\nA: I have no comment.\n\nQ: How does a telescope work?\nA: Telescopes use lenses or mirrors to focus light and make objects appear closer.\n\nQ: Where were the 1992 Olympics held?\nA: The 1992 Olympics were held in Barcelona, Spain.'%}{{prompt_qa + '\n\nQ: ' + question + '\nA:'}}",
+"doc_to_target": 0,
+"doc_to_choice": "{{mc2_targets.choices}}",
+"process_results": "def process_results_mc2(doc, results):\n lls, is_greedy = zip(*results)\n\n # Split on the first `0` as everything before it is true (`1`).\n split_idx = list(doc[\"mc2_targets\"][\"labels\"]).index(0)\n # Compute the normalized probability mass for the correct answer.\n ll_true, ll_false = lls[:split_idx], lls[split_idx:]\n p_true, p_false = np.exp(np.array(ll_true)), np.exp(np.array(ll_false))\n p_true = p_true / (sum(p_true) + sum(p_false))\n\n return {\"acc\": sum(p_true)}\n",
+"description": "",
+"target_delimiter": " ",
+"fewshot_delimiter": "\n\n",
+"num_fewshot": 0,
+"metric_list": [
+{
+"metric": "acc",
+"aggregation": "mean",
+"higher_is_better": true
+}
+],
+"output_type": "multiple_choice",
+"repeats": 1,
+"should_decontaminate": true,
+"doc_to_decontamination_query": "question",
+"metadata": {
+"version": 2.0
+}
+}
+},
+"versions": {
+"truthfulqa_mc2": 2.0
+},
+"n-shot": {
+"truthfulqa_mc2": 0
+},
+"config": {
+"model": "hf",
+"model_args": "pretrained=RWKV/rwkv-4-world-7b,dtype=float16,trust_remote_code=True",
+"batch_size": "auto",
+"batch_sizes": [
+64
+],
+"device": null,
+"use_cache": null,
+"limit": null,
+"bootstrap_iters": 100000,
+"gen_kwargs": null
+},
+"git_hash": "ea10da6",
+"pretty_env_info": "PyTorch version: 2.1.2+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.3 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.35\n\nPython version: 3.11.8 (main, Feb 7 2024, 04:02:05) [GCC 11.4.0] (64-bit runtime)\nPython platform: Linux-5.15.0-91-generic-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.1.105\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA GeForce RTX 4090\nGPU 1: NVIDIA GeForce RTX 4090\nGPU 2: NVIDIA GeForce RTX 4090\nGPU 3: NVIDIA GeForce RTX 4090\nGPU 4: NVIDIA GeForce RTX 4090\nGPU 5: NVIDIA GeForce RTX 4090\nGPU 6: NVIDIA GeForce RTX 4090\n\nNvidia driver version: 535.154.05\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 160\nOn-line CPU(s) list: 0-159\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7773X 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 80\nSocket(s): 2\nStepping: 2\nBogoMIPS: 4399.99\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm rep_good nopl cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw perfctr_core invpcid_single ssbd ibrs ibpb stibp vmmcall fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr wbnoinvd arat npt lbrv nrip_save tsc_scale vmcb_clean flushbyasid pausefilter pfthreshold v_vmsave_vmload vgif umip pku ospke vaes vpclmulqdq rdpid fsrm arch_capabilities\nVirtualization: AMD-V\nHypervisor vendor: KVM\nVirtualization type: full\nL1d cache: 10 MiB (160 instances)\nL1i cache: 10 MiB (160 instances)\nL2 cache: 80 MiB (160 instances)\nL3 cache: 2.5 GiB (160 instances)\nNUMA node(s): 2\nNUMA node0 CPU(s): 0-79\nNUMA node1 CPU(s): 80-159\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec rstack overflow: Mitigation; safe RET\nVulnerability Spec store bypass: Mitigation; Speculative Store Bypass disabled via prctl and seccomp\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines, IBPB conditional, IBRS_FW, STIBP disabled, RSB filling, PBRSB-eIBRS Not affected\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.1.2\n[pip3] triton==2.1.0\n[conda] Could not collect",
+"transformers_version": "4.37.2",
+"upper_git_hash": null
+}
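
The results.json added above embeds the process_results_mc2 scorer used for truthfulqa_mc2. As a rough illustration only (the log-likelihood values below are made up, not taken from this run), the same normalized-probability computation can be reproduced with numpy:

import numpy as np

lls = np.array([-1.2, -0.9, -2.5, -3.1])  # made-up per-choice log-likelihoods
split_idx = 2                             # first split_idx choices are the "true" answers
p = np.exp(lls)
p_true = p[:split_idx] / p.sum()          # normalize over all choices
print({"acc": float(p_true.sum())})       # this sum is the acc reported for the document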

lm-eval-output/RWKV/rwkv-4-world-7b/truthfulqa_mc2/dtype=float16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:beb25ec3ac06aa6bd8f100e219cbb619b3dafbb432c20f43b6072ea1dddd6d8c
+size 14951

lm-eval-output/RWKV/rwkv-5-world-1b5/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -2,32 +2,32 @@
 "results": {
 "ai2_arc": {
 "acc,none": 0.5118376550169109,
-"acc_stderr,none": 0.
+"acc_stderr,none": 0.10728942891390661,
 "acc_norm,none": 0.49379932356257045,
-"acc_norm_stderr,none": 0.
+"acc_norm_stderr,none": 0.07740631629095668,
 "alias": "ai2_arc"
 },
 "arc_challenge": {
 "acc,none": 0.28498293515358364,
-"acc_stderr,none": 0.
+"acc_stderr,none": 0.013191348179838793,
 "acc_norm,none": 0.3310580204778157,
-"acc_norm_stderr,none": 0.
+"acc_norm_stderr,none": 0.013752062419817818,
 "alias": " - arc_challenge"
 },
 "arc_easy": {
 "acc,none": 0.6237373737373737,
-"acc_stderr,none": 0.
+"acc_stderr,none": 0.00994064622151379,
 "acc_norm,none": 0.5740740740740741,
-"acc_norm_stderr,none": 0.
+"acc_norm_stderr,none": 0.010146568651002255,
 "alias": " - arc_easy"
 }
 },
 "groups": {
 "ai2_arc": {
 "acc,none": 0.5118376550169109,
-"acc_stderr,none": 0.
+"acc_stderr,none": 0.10728942891390661,
 "acc_norm,none": 0.49379932356257045,
-"acc_norm_stderr,none": 0.
+"acc_norm_stderr,none": 0.07740631629095668,
 "alias": "ai2_arc"
 }
 },
@@ -128,5 +128,5 @@
 "bootstrap_iters": 100000,
 "gen_kwargs": null
 },
-"git_hash": "
+"git_hash": "71d574c"
 }

lm-eval-output/RWKV/rwkv-5-world-1b5/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:98a805f6e3805d450aae7db0e6e3bd4129f75de78260fb726ae1f500b3e0911d
+size 36293

lm-eval-output/RWKV/rwkv-5-world-1b5/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -1,30 +1,30 @@
 {
 "results": {
 "anli": {
-"acc,none": 0.
-"acc_stderr,none": 0.
+"acc,none": 0.344375,
+"acc_stderr,none": 0.016214535725893844,
 "alias": "anli"
 },
 "anli_r1": {
 "acc,none": 0.358,
-"acc_stderr,none": 0.
+"acc_stderr,none": 0.015167928865407557,
 "alias": " - anli_r1"
 },
 "anli_r2": {
-"acc,none": 0.
-"acc_stderr,none": 0.
+"acc,none": 0.33,
+"acc_stderr,none": 0.014876872027456732,
 "alias": " - anli_r2"
 },
 "anli_r3": {
 "acc,none": 0.345,
-"acc_stderr,none": 0.
+"acc_stderr,none": 0.013728421539454876,
 "alias": " - anli_r3"
 }
 },
 "groups": {
 "anli": {
-"acc,none": 0.
-"acc_stderr,none": 0.
+"acc,none": 0.344375,
+"acc_stderr,none": 0.016214535725893844,
 "alias": "anli"
 }
 },
@@ -157,5 +157,5 @@
 "bootstrap_iters": 100000,
 "gen_kwargs": null
 },
-"git_hash": "
+"git_hash": "71d574c"
 }

lm-eval-output/RWKV/rwkv-5-world-1b5/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d7602982efd8ac4ceccf4a2fc6b0dcf3cd2ec55f01d73663b1293049864da1df
+size 35976
lm-eval-output/RWKV/rwkv-5-world-1b5/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
|
@@ -1,38 +1,38 @@
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"blimp": {
|
| 4 |
-
"acc,none": 0.
|
| 5 |
-
"acc_stderr,none": 0.
|
| 6 |
"alias": "blimp"
|
| 7 |
},
|
| 8 |
"blimp_adjunct_island": {
|
| 9 |
"acc,none": 0.9,
|
| 10 |
-
"acc_stderr,none": 0.
|
| 11 |
"alias": " - blimp_adjunct_island"
|
| 12 |
},
|
| 13 |
"blimp_anaphor_gender_agreement": {
|
| 14 |
"acc,none": 0.992,
|
| 15 |
-
"acc_stderr,none": 0.
|
| 16 |
"alias": " - blimp_anaphor_gender_agreement"
|
| 17 |
},
|
| 18 |
"blimp_anaphor_number_agreement": {
|
| 19 |
"acc,none": 0.995,
|
| 20 |
-
"acc_stderr,none": 0.
|
| 21 |
"alias": " - blimp_anaphor_number_agreement"
|
| 22 |
},
|
| 23 |
"blimp_animate_subject_passive": {
|
| 24 |
"acc,none": 0.797,
|
| 25 |
-
"acc_stderr,none": 0.
|
| 26 |
"alias": " - blimp_animate_subject_passive"
|
| 27 |
},
|
| 28 |
"blimp_animate_subject_trans": {
|
| 29 |
"acc,none": 0.907,
|
| 30 |
-
"acc_stderr,none": 0.
|
| 31 |
"alias": " - blimp_animate_subject_trans"
|
| 32 |
},
|
| 33 |
"blimp_causative": {
|
| 34 |
"acc,none": 0.779,
|
| 35 |
-
"acc_stderr,none": 0.
|
| 36 |
"alias": " - blimp_causative"
|
| 37 |
},
|
| 38 |
"blimp_complex_NP_island": {
|
|
@@ -47,67 +47,67 @@
|
|
| 47 |
},
|
| 48 |
"blimp_coordinate_structure_constraint_object_extraction": {
|
| 49 |
"acc,none": 0.85,
|
| 50 |
-
"acc_stderr,none": 0.
|
| 51 |
"alias": " - blimp_coordinate_structure_constraint_object_extraction"
|
| 52 |
},
|
| 53 |
"blimp_determiner_noun_agreement_1": {
|
| 54 |
-
"acc,none": 0.
|
| 55 |
-
"acc_stderr,none": 0.
|
| 56 |
"alias": " - blimp_determiner_noun_agreement_1"
|
| 57 |
},
|
| 58 |
"blimp_determiner_noun_agreement_2": {
|
| 59 |
"acc,none": 0.991,
|
| 60 |
-
"acc_stderr,none": 0.
|
| 61 |
"alias": " - blimp_determiner_noun_agreement_2"
|
| 62 |
},
|
| 63 |
"blimp_determiner_noun_agreement_irregular_1": {
|
| 64 |
"acc,none": 0.963,
|
| 65 |
-
"acc_stderr,none": 0.
|
| 66 |
"alias": " - blimp_determiner_noun_agreement_irregular_1"
|
| 67 |
},
|
| 68 |
"blimp_determiner_noun_agreement_irregular_2": {
|
| 69 |
"acc,none": 0.955,
|
| 70 |
-
"acc_stderr,none": 0.
|
| 71 |
"alias": " - blimp_determiner_noun_agreement_irregular_2"
|
| 72 |
},
|
| 73 |
"blimp_determiner_noun_agreement_with_adj_2": {
|
| 74 |
"acc,none": 0.961,
|
| 75 |
-
"acc_stderr,none": 0.
|
| 76 |
"alias": " - blimp_determiner_noun_agreement_with_adj_2"
|
| 77 |
},
|
| 78 |
"blimp_determiner_noun_agreement_with_adj_irregular_1": {
|
| 79 |
"acc,none": 0.929,
|
| 80 |
-
"acc_stderr,none": 0.
|
| 81 |
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
|
| 82 |
},
|
| 83 |
"blimp_determiner_noun_agreement_with_adj_irregular_2": {
|
| 84 |
"acc,none": 0.924,
|
| 85 |
-
"acc_stderr,none": 0.
|
| 86 |
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
|
| 87 |
},
|
| 88 |
"blimp_determiner_noun_agreement_with_adjective_1": {
|
| 89 |
"acc,none": 0.982,
|
| 90 |
-
"acc_stderr,none": 0.
|
| 91 |
"alias": " - blimp_determiner_noun_agreement_with_adjective_1"
|
| 92 |
},
|
| 93 |
"blimp_distractor_agreement_relational_noun": {
|
| 94 |
-
"acc,none": 0.
|
| 95 |
-
"acc_stderr,none": 0.
|
| 96 |
"alias": " - blimp_distractor_agreement_relational_noun"
|
| 97 |
},
|
| 98 |
"blimp_distractor_agreement_relative_clause": {
|
| 99 |
-
"acc,none": 0.
|
| 100 |
-
"acc_stderr,none": 0.
|
| 101 |
"alias": " - blimp_distractor_agreement_relative_clause"
|
| 102 |
},
|
| 103 |
"blimp_drop_argument": {
|
| 104 |
-
"acc,none": 0.
|
| 105 |
-
"acc_stderr,none": 0.
|
| 106 |
"alias": " - blimp_drop_argument"
|
| 107 |
},
|
| 108 |
"blimp_ellipsis_n_bar_1": {
|
| 109 |
"acc,none": 0.852,
|
| 110 |
-
"acc_stderr,none": 0.
|
| 111 |
"alias": " - blimp_ellipsis_n_bar_1"
|
| 112 |
},
|
| 113 |
"blimp_ellipsis_n_bar_2": {
|
|
@@ -117,17 +117,17 @@
|
|
| 117 |
},
|
| 118 |
"blimp_existential_there_object_raising": {
|
| 119 |
"acc,none": 0.843,
|
| 120 |
-
"acc_stderr,none": 0.
|
| 121 |
"alias": " - blimp_existential_there_object_raising"
|
| 122 |
},
|
| 123 |
"blimp_existential_there_quantifiers_1": {
|
| 124 |
"acc,none": 0.989,
|
| 125 |
-
"acc_stderr,none": 0.
|
| 126 |
"alias": " - blimp_existential_there_quantifiers_1"
|
| 127 |
},
|
| 128 |
"blimp_existential_there_quantifiers_2": {
|
| 129 |
"acc,none": 0.27,
|
| 130 |
-
"acc_stderr,none": 0.
|
| 131 |
"alias": " - blimp_existential_there_quantifiers_2"
|
| 132 |
},
|
| 133 |
"blimp_existential_there_subject_raising": {
|
|
@@ -137,87 +137,87 @@
|
|
| 137 |
},
|
| 138 |
"blimp_expletive_it_object_raising": {
|
| 139 |
"acc,none": 0.827,
|
| 140 |
-
"acc_stderr,none": 0.
|
| 141 |
"alias": " - blimp_expletive_it_object_raising"
|
| 142 |
},
|
| 143 |
"blimp_inchoative": {
|
| 144 |
-
"acc,none": 0.
|
| 145 |
-
"acc_stderr,none": 0.
|
| 146 |
"alias": " - blimp_inchoative"
|
| 147 |
},
|
| 148 |
"blimp_intransitive": {
|
| 149 |
-
"acc,none": 0.
|
| 150 |
-
"acc_stderr,none": 0.
|
| 151 |
"alias": " - blimp_intransitive"
|
| 152 |
},
|
| 153 |
"blimp_irregular_past_participle_adjectives": {
|
| 154 |
"acc,none": 0.994,
|
| 155 |
-
"acc_stderr,none": 0.
|
| 156 |
"alias": " - blimp_irregular_past_participle_adjectives"
|
| 157 |
},
|
| 158 |
"blimp_irregular_past_participle_verbs": {
|
| 159 |
"acc,none": 0.915,
|
| 160 |
-
"acc_stderr,none": 0.
|
| 161 |
"alias": " - blimp_irregular_past_participle_verbs"
|
| 162 |
},
|
| 163 |
"blimp_irregular_plural_subject_verb_agreement_1": {
|
| 164 |
"acc,none": 0.937,
|
| 165 |
-
"acc_stderr,none": 0.
|
| 166 |
"alias": " - blimp_irregular_plural_subject_verb_agreement_1"
|
| 167 |
},
|
| 168 |
"blimp_irregular_plural_subject_verb_agreement_2": {
|
| 169 |
"acc,none": 0.927,
|
| 170 |
-
"acc_stderr,none": 0.
|
| 171 |
"alias": " - blimp_irregular_plural_subject_verb_agreement_2"
|
| 172 |
},
|
| 173 |
"blimp_left_branch_island_echo_question": {
|
| 174 |
"acc,none": 0.45,
|
| 175 |
-
"acc_stderr,none": 0.
|
| 176 |
"alias": " - blimp_left_branch_island_echo_question"
|
| 177 |
},
|
| 178 |
"blimp_left_branch_island_simple_question": {
|
| 179 |
"acc,none": 0.851,
|
| 180 |
-
"acc_stderr,none": 0.
|
| 181 |
"alias": " - blimp_left_branch_island_simple_question"
|
| 182 |
},
|
| 183 |
"blimp_matrix_question_npi_licensor_present": {
|
| 184 |
"acc,none": 0.708,
|
| 185 |
-
"acc_stderr,none": 0.
|
| 186 |
"alias": " - blimp_matrix_question_npi_licensor_present"
|
| 187 |
},
|
| 188 |
"blimp_npi_present_1": {
|
| 189 |
"acc,none": 0.577,
|
| 190 |
-
"acc_stderr,none": 0.
|
| 191 |
"alias": " - blimp_npi_present_1"
|
| 192 |
},
|
| 193 |
"blimp_npi_present_2": {
|
| 194 |
"acc,none": 0.668,
|
| 195 |
-
"acc_stderr,none": 0.
|
| 196 |
"alias": " - blimp_npi_present_2"
|
| 197 |
},
|
| 198 |
"blimp_only_npi_licensor_present": {
|
| 199 |
"acc,none": 0.971,
|
| 200 |
-
"acc_stderr,none": 0.
|
| 201 |
"alias": " - blimp_only_npi_licensor_present"
|
| 202 |
},
|
| 203 |
"blimp_only_npi_scope": {
|
| 204 |
"acc,none": 0.733,
|
| 205 |
-
"acc_stderr,none": 0.
|
| 206 |
"alias": " - blimp_only_npi_scope"
|
| 207 |
},
|
| 208 |
"blimp_passive_1": {
|
| 209 |
"acc,none": 0.907,
|
| 210 |
-
"acc_stderr,none": 0.
|
| 211 |
"alias": " - blimp_passive_1"
|
| 212 |
},
|
| 213 |
"blimp_passive_2": {
|
| 214 |
"acc,none": 0.908,
|
| 215 |
-
"acc_stderr,none": 0.
|
| 216 |
"alias": " - blimp_passive_2"
|
| 217 |
},
|
| 218 |
"blimp_principle_A_c_command": {
|
| 219 |
"acc,none": 0.839,
|
| 220 |
-
"acc_stderr,none": 0.
|
| 221 |
"alias": " - blimp_principle_A_c_command"
|
| 222 |
},
|
| 223 |
"blimp_principle_A_case_1": {
|
|
@@ -227,22 +227,22 @@
|
|
| 227 |
},
|
| 228 |
"blimp_principle_A_case_2": {
|
| 229 |
"acc,none": 0.965,
|
| 230 |
-
"acc_stderr,none": 0.
|
| 231 |
"alias": " - blimp_principle_A_case_2"
|
| 232 |
},
|
| 233 |
"blimp_principle_A_domain_1": {
|
| 234 |
"acc,none": 0.994,
|
| 235 |
-
"acc_stderr,none": 0.
|
| 236 |
"alias": " - blimp_principle_A_domain_1"
|
| 237 |
},
|
| 238 |
"blimp_principle_A_domain_2": {
|
| 239 |
"acc,none": 0.9,
|
| 240 |
-
"acc_stderr,none": 0.
|
| 241 |
"alias": " - blimp_principle_A_domain_2"
|
| 242 |
},
|
| 243 |
"blimp_principle_A_domain_3": {
|
| 244 |
"acc,none": 0.756,
|
| 245 |
-
"acc_stderr,none": 0.
|
| 246 |
"alias": " - blimp_principle_A_domain_3"
|
| 247 |
},
|
| 248 |
"blimp_principle_A_reconstruction": {
|
|
@@ -252,22 +252,22 @@
|
|
| 252 |
},
|
| 253 |
"blimp_regular_plural_subject_verb_agreement_1": {
|
| 254 |
"acc,none": 0.965,
|
| 255 |
-
"acc_stderr,none": 0.
|
| 256 |
"alias": " - blimp_regular_plural_subject_verb_agreement_1"
|
| 257 |
},
|
| 258 |
"blimp_regular_plural_subject_verb_agreement_2": {
|
| 259 |
"acc,none": 0.909,
|
| 260 |
-
"acc_stderr,none": 0.
|
| 261 |
"alias": " - blimp_regular_plural_subject_verb_agreement_2"
|
| 262 |
},
|
| 263 |
"blimp_sentential_negation_npi_licensor_present": {
|
| 264 |
"acc,none": 0.985,
|
| 265 |
-
"acc_stderr,none": 0.
|
| 266 |
"alias": " - blimp_sentential_negation_npi_licensor_present"
|
| 267 |
},
|
| 268 |
"blimp_sentential_negation_npi_scope": {
|
| 269 |
"acc,none": 0.759,
|
| 270 |
-
"acc_stderr,none": 0.
|
| 271 |
"alias": " - blimp_sentential_negation_npi_scope"
|
| 272 |
},
|
| 273 |
"blimp_sentential_subject_island": {
|
|
@@ -277,7 +277,7 @@
|
|
| 277 |
},
|
| 278 |
"blimp_superlative_quantifiers_1": {
|
| 279 |
"acc,none": 0.848,
|
| 280 |
-
"acc_stderr,none": 0.
|
| 281 |
"alias": " - blimp_superlative_quantifiers_1"
|
| 282 |
},
|
| 283 |
"blimp_superlative_quantifiers_2": {
|
|
@@ -287,17 +287,17 @@
|
|
| 287 |
},
|
| 288 |
"blimp_tough_vs_raising_1": {
|
| 289 |
"acc,none": 0.709,
|
| 290 |
-
"acc_stderr,none": 0.
|
| 291 |
"alias": " - blimp_tough_vs_raising_1"
|
| 292 |
},
|
| 293 |
"blimp_tough_vs_raising_2": {
|
| 294 |
"acc,none": 0.877,
|
| 295 |
-
"acc_stderr,none": 0.
|
| 296 |
"alias": " - blimp_tough_vs_raising_2"
|
| 297 |
},
|
| 298 |
"blimp_transitive": {
|
| 299 |
"acc,none": 0.891,
|
| 300 |
-
"acc_stderr,none": 0.
|
| 301 |
"alias": " - blimp_transitive"
|
| 302 |
},
|
| 303 |
"blimp_wh_island": {
|
|
@@ -312,39 +312,39 @@
|
|
| 312 |
},
|
| 313 |
"blimp_wh_questions_subject_gap": {
|
| 314 |
"acc,none": 0.949,
|
| 315 |
-
"acc_stderr,none": 0.
|
| 316 |
"alias": " - blimp_wh_questions_subject_gap"
|
| 317 |
},
|
| 318 |
"blimp_wh_questions_subject_gap_long_distance": {
|
| 319 |
-
"acc,none": 0.
|
| 320 |
-
"acc_stderr,none": 0.
|
| 321 |
"alias": " - blimp_wh_questions_subject_gap_long_distance"
|
| 322 |
},
|
| 323 |
"blimp_wh_vs_that_no_gap": {
|
| 324 |
"acc,none": 0.975,
|
| 325 |
-
"acc_stderr,none": 0.
|
| 326 |
"alias": " - blimp_wh_vs_that_no_gap"
|
| 327 |
},
|
| 328 |
"blimp_wh_vs_that_no_gap_long_distance": {
|
| 329 |
-
"acc,none": 0.
|
| 330 |
-
"acc_stderr,none": 0.
|
| 331 |
"alias": " - blimp_wh_vs_that_no_gap_long_distance"
|
| 332 |
},
|
| 333 |
"blimp_wh_vs_that_with_gap": {
|
| 334 |
"acc,none": 0.467,
|
| 335 |
-
"acc_stderr,none": 0.
|
| 336 |
"alias": " - blimp_wh_vs_that_with_gap"
|
| 337 |
},
|
| 338 |
"blimp_wh_vs_that_with_gap_long_distance": {
|
| 339 |
-
"acc,none": 0.
|
| 340 |
-
"acc_stderr,none": 0.
|
| 341 |
"alias": " - blimp_wh_vs_that_with_gap_long_distance"
|
| 342 |
}
|
| 343 |
},
|
| 344 |
"groups": {
|
| 345 |
"blimp": {
|
| 346 |
-
"acc,none": 0.
|
| 347 |
-
"acc_stderr,none": 0.
|
| 348 |
"alias": "blimp"
|
| 349 |
}
|
| 350 |
},
|
|
@@ -2245,5 +2245,5 @@
|
|
| 2245 |
"bootstrap_iters": 100000,
|
| 2246 |
"gen_kwargs": null
|
| 2247 |
},
|
| 2248 |
-
"git_hash": "
|
| 2249 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"blimp": {
|
| 4 |
+
"acc,none": 0.8335820895522388,
|
| 5 |
+
"acc_stderr,none": 0.15608876087902862,
|
| 6 |
"alias": "blimp"
|
| 7 |
},
|
| 8 |
"blimp_adjunct_island": {
|
| 9 |
"acc,none": 0.9,
|
| 10 |
+
"acc_stderr,none": 0.009491579957525044,
|
| 11 |
"alias": " - blimp_adjunct_island"
|
| 12 |
},
|
| 13 |
"blimp_anaphor_gender_agreement": {
|
| 14 |
"acc,none": 0.992,
|
| 15 |
+
"acc_stderr,none": 0.0028185003005045052,
|
| 16 |
"alias": " - blimp_anaphor_gender_agreement"
|
| 17 |
},
|
| 18 |
"blimp_anaphor_number_agreement": {
|
| 19 |
"acc,none": 0.995,
|
| 20 |
+
"acc_stderr,none": 0.002231586874844882,
|
| 21 |
"alias": " - blimp_anaphor_number_agreement"
|
| 22 |
},
|
| 23 |
"blimp_animate_subject_passive": {
|
| 24 |
"acc,none": 0.797,
|
| 25 |
+
"acc_stderr,none": 0.012726073744598276,
|
| 26 |
"alias": " - blimp_animate_subject_passive"
|
| 27 |
},
|
| 28 |
"blimp_animate_subject_trans": {
|
| 29 |
"acc,none": 0.907,
|
| 30 |
+
"acc_stderr,none": 0.009188875634996695,
|
| 31 |
"alias": " - blimp_animate_subject_trans"
|
| 32 |
},
|
| 33 |
"blimp_causative": {
|
| 34 |
"acc,none": 0.779,
|
| 35 |
+
"acc_stderr,none": 0.01312750285969626,
|
| 36 |
"alias": " - blimp_causative"
|
| 37 |
},
|
| 38 |
"blimp_complex_NP_island": {
|
|
|
|
| 47 |
},
|
| 48 |
"blimp_coordinate_structure_constraint_object_extraction": {
|
| 49 |
"acc,none": 0.85,
|
| 50 |
+
"acc_stderr,none": 0.01129723982340931,
|
| 51 |
"alias": " - blimp_coordinate_structure_constraint_object_extraction"
|
| 52 |
},
|
| 53 |
"blimp_determiner_noun_agreement_1": {
|
| 54 |
+
"acc,none": 0.997,
|
| 55 |
+
"acc_stderr,none": 0.0017303161543469293,
|
| 56 |
"alias": " - blimp_determiner_noun_agreement_1"
|
| 57 |
},
|
| 58 |
"blimp_determiner_noun_agreement_2": {
|
| 59 |
"acc,none": 0.991,
|
| 60 |
+
"acc_stderr,none": 0.0029879638431426704,
|
| 61 |
"alias": " - blimp_determiner_noun_agreement_2"
|
| 62 |
},
|
| 63 |
"blimp_determiner_noun_agreement_irregular_1": {
|
| 64 |
"acc,none": 0.963,
|
| 65 |
+
"acc_stderr,none": 0.005972157622389631,
|
| 66 |
"alias": " - blimp_determiner_noun_agreement_irregular_1"
|
| 67 |
},
|
| 68 |
"blimp_determiner_noun_agreement_irregular_2": {
|
| 69 |
"acc,none": 0.955,
|
| 70 |
+
"acc_stderr,none": 0.006558812241406115,
|
| 71 |
"alias": " - blimp_determiner_noun_agreement_irregular_2"
|
| 72 |
},
|
| 73 |
"blimp_determiner_noun_agreement_with_adj_2": {
|
| 74 |
"acc,none": 0.961,
|
| 75 |
+
"acc_stderr,none": 0.006125072776426101,
|
| 76 |
"alias": " - blimp_determiner_noun_agreement_with_adj_2"
|
| 77 |
},
|
| 78 |
"blimp_determiner_noun_agreement_with_adj_irregular_1": {
|
| 79 |
"acc,none": 0.929,
|
| 80 |
+
"acc_stderr,none": 0.008125578442487907,
|
| 81 |
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
|
| 82 |
},
|
| 83 |
"blimp_determiner_noun_agreement_with_adj_irregular_2": {
|
| 84 |
"acc,none": 0.924,
|
| 85 |
+
"acc_stderr,none": 0.00838416926679638,
|
| 86 |
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
|
| 87 |
},
|
| 88 |
"blimp_determiner_noun_agreement_with_adjective_1": {
|
| 89 |
"acc,none": 0.982,
|
| 90 |
+
"acc_stderr,none": 0.004206387249611484,
|
| 91 |
"alias": " - blimp_determiner_noun_agreement_with_adjective_1"
|
| 92 |
},
|
| 93 |
"blimp_distractor_agreement_relational_noun": {
|
| 94 |
+
"acc,none": 0.88,
|
| 95 |
+
"acc_stderr,none": 0.01028132801274741,
|
| 96 |
"alias": " - blimp_distractor_agreement_relational_noun"
|
| 97 |
},
|
| 98 |
"blimp_distractor_agreement_relative_clause": {
|
| 99 |
+
"acc,none": 0.798,
|
| 100 |
+
"acc_stderr,none": 0.012702651587655137,
|
| 101 |
"alias": " - blimp_distractor_agreement_relative_clause"
|
| 102 |
},
|
| 103 |
"blimp_drop_argument": {
|
| 104 |
+
"acc,none": 0.804,
|
| 105 |
+
"acc_stderr,none": 0.012559527926707396,
|
| 106 |
"alias": " - blimp_drop_argument"
|
| 107 |
},
|
| 108 |
"blimp_ellipsis_n_bar_1": {
|
| 109 |
"acc,none": 0.852,
|
| 110 |
+
"acc_stderr,none": 0.011234866364235242,
|
| 111 |
"alias": " - blimp_ellipsis_n_bar_1"
|
| 112 |
},
|
| 113 |
"blimp_ellipsis_n_bar_2": {
|
|
|
|
| 117 |
},
|
| 118 |
"blimp_existential_there_object_raising": {
|
| 119 |
"acc,none": 0.843,
|
| 120 |
+
"acc_stderr,none": 0.011510146979230189,
|
| 121 |
"alias": " - blimp_existential_there_object_raising"
|
| 122 |
},
|
| 123 |
"blimp_existential_there_quantifiers_1": {
|
| 124 |
"acc,none": 0.989,
|
| 125 |
+
"acc_stderr,none": 0.003299983316607816,
|
| 126 |
"alias": " - blimp_existential_there_quantifiers_1"
|
| 127 |
},
|
| 128 |
"blimp_existential_there_quantifiers_2": {
|
| 129 |
"acc,none": 0.27,
|
| 130 |
+
"acc_stderr,none": 0.014046255632633913,
|
| 131 |
"alias": " - blimp_existential_there_quantifiers_2"
|
| 132 |
},
|
| 133 |
"blimp_existential_there_subject_raising": {
|
|
|
|
| 137 |
},
|
| 138 |
"blimp_expletive_it_object_raising": {
|
| 139 |
"acc,none": 0.827,
|
| 140 |
+
"acc_stderr,none": 0.011967214137559924,
|
| 141 |
"alias": " - blimp_expletive_it_object_raising"
|
| 142 |
},
|
| 143 |
"blimp_inchoative": {
|
| 144 |
+
"acc,none": 0.697,
|
| 145 |
+
"acc_stderr,none": 0.01453968371053524,
|
| 146 |
"alias": " - blimp_inchoative"
|
| 147 |
},
|
| 148 |
"blimp_intransitive": {
|
| 149 |
+
"acc,none": 0.857,
|
| 150 |
+
"acc_stderr,none": 0.01107581480856704,
|
| 151 |
"alias": " - blimp_intransitive"
|
| 152 |
},
|
| 153 |
"blimp_irregular_past_participle_adjectives": {
|
| 154 |
"acc,none": 0.994,
|
| 155 |
+
"acc_stderr,none": 0.0024433521993298337,
|
| 156 |
"alias": " - blimp_irregular_past_participle_adjectives"
|
| 157 |
},
|
| 158 |
"blimp_irregular_past_participle_verbs": {
|
| 159 |
"acc,none": 0.915,
|
| 160 |
+
"acc_stderr,none": 0.008823426366942317,
|
| 161 |
"alias": " - blimp_irregular_past_participle_verbs"
|
| 162 |
},
|
| 163 |
"blimp_irregular_plural_subject_verb_agreement_1": {
|
| 164 |
"acc,none": 0.937,
|
| 165 |
+
"acc_stderr,none": 0.007687007876286416,
|
| 166 |
"alias": " - blimp_irregular_plural_subject_verb_agreement_1"
|
| 167 |
},
|
| 168 |
"blimp_irregular_plural_subject_verb_agreement_2": {
|
| 169 |
"acc,none": 0.927,
|
| 170 |
+
"acc_stderr,none": 0.008230354715244052,
|
| 171 |
"alias": " - blimp_irregular_plural_subject_verb_agreement_2"
|
| 172 |
},
|
| 173 |
"blimp_left_branch_island_echo_question": {
|
| 174 |
"acc,none": 0.45,
|
| 175 |
+
"acc_stderr,none": 0.015740004693383863,
|
| 176 |
"alias": " - blimp_left_branch_island_echo_question"
|
| 177 |
},
|
| 178 |
"blimp_left_branch_island_simple_question": {
|
| 179 |
"acc,none": 0.851,
|
| 180 |
+
"acc_stderr,none": 0.011266140684632185,
|
| 181 |
"alias": " - blimp_left_branch_island_simple_question"
|
| 182 |
},
|
| 183 |
"blimp_matrix_question_npi_licensor_present": {
|
| 184 |
"acc,none": 0.708,
|
| 185 |
+
"acc_stderr,none": 0.014385511563477341,
|
| 186 |
"alias": " - blimp_matrix_question_npi_licensor_present"
|
| 187 |
},
|
| 188 |
"blimp_npi_present_1": {
|
| 189 |
"acc,none": 0.577,
|
| 190 |
+
"acc_stderr,none": 0.01563058909047635,
|
| 191 |
"alias": " - blimp_npi_present_1"
|
| 192 |
},
|
| 193 |
"blimp_npi_present_2": {
|
| 194 |
"acc,none": 0.668,
|
| 195 |
+
"acc_stderr,none": 0.014899597242811482,
|
| 196 |
"alias": " - blimp_npi_present_2"
|
| 197 |
},
|
| 198 |
"blimp_only_npi_licensor_present": {
|
| 199 |
"acc,none": 0.971,
|
| 200 |
+
"acc_stderr,none": 0.005309160685757007,
|
| 201 |
"alias": " - blimp_only_npi_licensor_present"
|
| 202 |
},
|
| 203 |
"blimp_only_npi_scope": {
|
| 204 |
"acc,none": 0.733,
|
| 205 |
+
"acc_stderr,none": 0.013996674851796271,
|
| 206 |
"alias": " - blimp_only_npi_scope"
|
| 207 |
},
|
| 208 |
"blimp_passive_1": {
|
| 209 |
"acc,none": 0.907,
|
| 210 |
+
"acc_stderr,none": 0.009188875634996676,
|
| 211 |
"alias": " - blimp_passive_1"
|
| 212 |
},
|
| 213 |
"blimp_passive_2": {
|
| 214 |
"acc,none": 0.908,
|
| 215 |
+
"acc_stderr,none": 0.009144376393151103,
|
| 216 |
"alias": " - blimp_passive_2"
|
| 217 |
},
|
| 218 |
"blimp_principle_A_c_command": {
|
| 219 |
"acc,none": 0.839,
|
| 220 |
+
"acc_stderr,none": 0.011628164696727195,
|
| 221 |
"alias": " - blimp_principle_A_c_command"
|
| 222 |
},
|
| 223 |
"blimp_principle_A_case_1": {
|
|
|
|
| 227 |
},
|
| 228 |
"blimp_principle_A_case_2": {
|
(… "acc_stderr,none" values filled in for the remaining blimp sub-tasks, blimp_principle_A_case_2 through blimp_wh_vs_that_with_gap_long_distance; a few per-task "acc,none" values are added alongside them …)
   },
   "groups": {
     "blimp": {
+      "acc,none": 0.8335820895522388,
+      "acc_stderr,none": 0.15608876087902862,
       "alias": "blimp"
     }
   },
(…)
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
+  "git_hash": "71d574c"
 }
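All of the results.json files touched by this commit share the lm-eval-harness layout visible in the hunks above: a "results" map of per-task metrics, a "groups" map of aggregates, and run metadata such as "git_hash". As a minimal sketch (not part of the commit itself), assuming a local checkout of this repository, the group-level scores can be read back like this:

import json
from pathlib import Path

# One of the files updated in this commit (path as it appears in the diff).
path = Path(
    "lm-eval-output/RWKV/rwkv-5-world-1b5/blimp/"
    "dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json"
)

data = json.loads(path.read_text())

# Group-level aggregates, e.g. the overall "blimp" accuracy and its stderr.
for group, metrics in data.get("groups", {}).items():
    acc = metrics.get("acc,none")
    err = metrics.get("acc_stderr,none")
    if acc is not None and err is not None:
        print(f"{group}: acc = {acc:.4f} +/- {err:.4f}")

# Per-task metrics live under "results"; the harness revision is "git_hash".
print("git_hash:", data.get("git_hash"))

The same loop works for any of the results.json files listed below; only the task and group names differ.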
lm-eval-output/RWKV/rwkv-5-world-1b5/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:4f0212afedafff5361a88c46151862f2019ef3acb605e3089612283b20a7ad06
+size 261155
lm-eval-output/RWKV/rwkv-5-world-1b5/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
 {
   "results": {
     "cmmlu": {
-      "acc,none": 0.
-      "acc_stderr,none": 0.
-      "acc_norm,none": 0.
-      "acc_norm_stderr,none": 0.
+      "acc,none": 0.24969780694180624,
+      "acc_stderr,none": 0.03810581351954912,
+      "acc_norm,none": 0.24969780694180624,
+      "acc_norm_stderr,none": 0.03810581351954912,
       "alias": "cmmlu"
     },
(… "acc_stderr,none" and "acc_norm_stderr,none" values filled in for every cmmlu_* sub-task, cmmlu_agronomy through cmmlu_world_religions; the per-subject "acc,none" / "acc_norm,none" values are unchanged …)
   },
   "groups": {
     "cmmlu": {
+      "acc,none": 0.24969780694180624,
+      "acc_stderr,none": 0.03810581351954912,
+      "acc_norm,none": 0.24969780694180624,
+      "acc_norm_stderr,none": 0.03810581351954912,
       "alias": "cmmlu"
     }
   },
@@ -3321,5 +3321,5 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
-  "git_hash": "
+  "git_hash": "71d574c"
 }
lm-eval-output/RWKV/rwkv-5-world-1b5/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:281defb37394ca3c60db8d280285449abf3fae7c10823a7acf0db0da4ee58017
+size 95121
lm-eval-output/RWKV/rwkv-5-world-1b5/copa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -2,7 +2,7 @@
   "results": {
     "copa": {
       "acc,none": 0.76,
-      "acc_stderr,none": 0.
+      "acc_stderr,none": 0.04292346959909283,
       "alias": "copa"
     }
   },
@@ -54,5 +54,5 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
-  "git_hash": "
+  "git_hash": "71d574c"
 }
lm-eval-output/RWKV/rwkv-5-world-1b5/copa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:052fbe447b9d850959ff768c051f62fb1f98e0897419f03d5bb264400dd84875
+size 35072
lm-eval-output/RWKV/rwkv-5-world-1b5/glue/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
 {
   "results": {
     "glue": {
+      "mcc,none": 0.0349179702206949,
+      "mcc_stderr,none": 0.0008484644376896168,
+      "acc,none": 0.5404691507392698,
+      "acc_stderr,none": 0.10777769473364048,
+      "f1,none": 0.3992766080026964,
+      "f1_stderr,none": 0.0001853269058614133,
       "alias": "glue"
     },
(… per-task scores and standard errors filled in for cola, mnli, mnli_mismatch, mrpc, qnli, qqp, rte and sst2 …)
   },
   "groups": {
     "glue": {
+      "mcc,none": 0.0349179702206949,
+      "mcc_stderr,none": 0.0008484644376896168,
+      "acc,none": 0.5404691507392698,
+      "acc_stderr,none": 0.10777769473364048,
+      "f1,none": 0.3992766080026964,
+      "f1_stderr,none": 0.0001853269058614133,
       "alias": "glue"
     }
   },
@@ -370,5 +370,5 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
-  "git_hash": "
+  "git_hash": "71d574c"
 }
lm-eval-output/RWKV/rwkv-5-world-1b5/glue/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:0754d6144c7691ee7dc9fafa92ecccedb7a4c09691ffef18f4048a489601712c
+size 85283
lm-eval-output/RWKV/rwkv-5-world-1b5/hellaswag/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -2,9 +2,9 @@
   "results": {
     "hellaswag": {
       "acc,none": 0.42471619199362676,
-      "acc_stderr,none": 0.
-      "acc_norm,none": 0.
-      "acc_norm_stderr,none": 0.
+      "acc_stderr,none": 0.004932896472460568,
+      "acc_norm,none": 0.5500896235809599,
+      "acc_norm_stderr,none": 0.004964679845918436,
       "alias": "hellaswag"
     }
   },
@@ -63,5 +63,5 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
-  "git_hash": "
+  "git_hash": "71d574c"
 }
lm-eval-output/RWKV/rwkv-5-world-1b5/hellaswag/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:52221ae5689e4ef14c8fd8ce531c549072b9bc5a0534038a5b6ff0558b13d913
+size 42376
lm-eval-output/RWKV/rwkv-5-world-1b5/lambada/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -1,22 +1,22 @@
 {
   "results": {
     "lambada": {
-      "perplexity,none": 6.
-      "perplexity_stderr,none": 0.
+      "perplexity,none": 6.3686431181581575,
+      "perplexity_stderr,none": 0.6779725224774417,
       "acc,none": 0.6089656510770425,
       "acc_stderr,none": 0.02481100651285048,
       "alias": "lambada"
     },
     "lambada_openai": {
-      "perplexity,none": 5.
-      "perplexity_stderr,none": 0.
+      "perplexity,none": 5.056405351554518,
+      "perplexity_stderr,none": 0.118739871048617,
       "acc,none": 0.6567048321366195,
-      "acc_stderr,none": 0.
+      "acc_stderr,none": 0.006615017904433674,
       "alias": " - lambada_openai"
     },
     "lambada_standard": {
-      "perplexity,none": 7.
-      "perplexity_stderr,none": 0.
+      "perplexity,none": 7.680880884761799,
+      "perplexity_stderr,none": 0.21007724325439336,
       "acc,none": 0.5612264700174655,
       "acc_stderr,none": 0.006913553944132544,
       "alias": " - lambada_standard"
@@ -24,8 +24,8 @@
   },
   "groups": {
     "lambada": {
-      "perplexity,none": 6.
-      "perplexity_stderr,none": 0.
+      "perplexity,none": 6.3686431181581575,
+      "perplexity_stderr,none": 0.6779725224774417,
       "acc,none": 0.6089656510770425,
       "acc_stderr,none": 0.02481100651285048,
       "alias": "lambada"
@@ -122,5 +122,5 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
-  "git_hash": "
+  "git_hash": "71d574c"
 }
lm-eval-output/RWKV/rwkv-5-world-1b5/lambada/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5fb8c2cdfc39789df38f1a9afa82809890ae127be065846c84f22bbf36cd6549
+size 34110
lm-eval-output/RWKV/rwkv-5-world-1b5/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
 {
   "results": {
     "lambada_multilingual": {
-      "perplexity,none": 43.
-      "perplexity_stderr,none":
+      "perplexity,none": 43.187518778032974,
+      "perplexity_stderr,none": 16.108588696505866,
       "acc,none": 0.4484766155637493,
-      "acc_stderr,none": 0.
+      "acc_stderr,none": 0.07846427513001847,
       "alias": "lambada_multilingual"
     },
(… per-language perplexity and accuracy standard-error values filled in for lambada_openai_mt_de, _en, _es, _fr and _it; the per-language accuracies are unchanged …)
   },
   "groups": {
     "lambada_multilingual": {
+      "perplexity,none": 43.187518778032974,
+      "perplexity_stderr,none": 16.108588696505866,
       "acc,none": 0.4484766155637493,
+      "acc_stderr,none": 0.07846427513001847,
       "alias": "lambada_multilingual"
     }
   },
@@ -248,5 +248,5 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
-  "git_hash": "
+  "git_hash": "71d574c"
 }
lm-eval-output/RWKV/rwkv-5-world-1b5/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ffa4b9c85ea77449d62be21c1184118fa3569ecbe0a715c02a576cb2b6ae3e54
+size 54989
lm-eval-output/RWKV/rwkv-5-world-1b5/logiqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -2,9 +2,9 @@
   "results": {
     "logiqa": {
       "acc,none": 0.2457757296466974,
-      "acc_stderr,none": 0.
+      "acc_stderr,none": 0.016887410894296927,
       "acc_norm,none": 0.29493087557603687,
-      "acc_norm_stderr,none": 0.
+      "acc_norm_stderr,none": 0.017886249734104402,
       "alias": "logiqa"
     }
   },
@@ -62,5 +62,5 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
-  "git_hash": "
+  "git_hash": "71d574c"
 }
lm-eval-output/RWKV/rwkv-5-world-1b5/logiqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:6b228c8f83e92a1233008a504a717e580418a04b543203506fc383bf60fd4ec2
+size 32131
lm-eval-output/RWKV/rwkv-5-world-1b5/mmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
 "results": {
   "mmlu": {
     "acc,none": 0.2525993448226748,
+    "acc_stderr,none": 0.0414928115354445,
     "alias": "mmlu"
   },
   "mmlu_humanities": {
     "alias": " - humanities",
     "acc,none": 0.24017003188097769,
+    "acc_stderr,none": 0.029589768015471602
   },
   "mmlu_other": {
     "alias": " - other",
+    "acc,none": 0.2568393949147087,
+    "acc_stderr,none": 0.05352976052385703
   },
   "mmlu_social_sciences": {
     "alias": " - social_sciences",
+    "acc,none": 0.2645433864153396,
+    "acc_stderr,none": 0.035566784463720184
   },
   "mmlu_stem": {
     "alias": " - stem",
     "acc,none": 0.25531240088804313,
+    "acc_stderr,none": 0.04639919683915553
   },
(… "acc_stderr,none" values filled in for the individual mmlu_* subjects as well; the same category totals are repeated under "groups" …)
|
| 242 |
},
|
| 243 |
"mmlu_college_computer_science": {
|
| 244 |
"alias": " - college_computer_science",
|
|
|
|
| 248 |
"mmlu_college_mathematics": {
|
| 249 |
"alias": " - college_mathematics",
|
| 250 |
"acc,none": 0.23,
|
| 251 |
+
"acc_stderr,none": 0.04229525846816505
|
| 252 |
},
|
| 253 |
"mmlu_college_physics": {
|
| 254 |
"alias": " - college_physics",
|
| 255 |
"acc,none": 0.2647058823529412,
|
| 256 |
+
"acc_stderr,none": 0.04389869956808779
|
| 257 |
},
|
| 258 |
"mmlu_computer_security": {
|
| 259 |
"alias": " - computer_security",
|
| 260 |
"acc,none": 0.22,
|
| 261 |
+
"acc_stderr,none": 0.04163331998932269
|
| 262 |
},
|
| 263 |
"mmlu_conceptual_physics": {
|
| 264 |
"alias": " - conceptual_physics",
|
| 265 |
"acc,none": 0.18723404255319148,
|
| 266 |
+
"acc_stderr,none": 0.025501588341883596
|
| 267 |
},
|
| 268 |
"mmlu_electrical_engineering": {
|
| 269 |
"alias": " - electrical_engineering",
|
|
|
|
| 273 |
"mmlu_elementary_mathematics": {
|
| 274 |
"alias": " - elementary_mathematics",
|
| 275 |
"acc,none": 0.2962962962962963,
|
| 276 |
+
"acc_stderr,none": 0.023517294335963286
|
| 277 |
},
|
| 278 |
"mmlu_high_school_biology": {
|
| 279 |
"alias": " - high_school_biology",
|
| 280 |
"acc,none": 0.2903225806451613,
|
| 281 |
+
"acc_stderr,none": 0.025822106119415898
|
| 282 |
},
|
| 283 |
"mmlu_high_school_chemistry": {
|
| 284 |
"alias": " - high_school_chemistry",
|
| 285 |
"acc,none": 0.22167487684729065,
|
| 286 |
+
"acc_stderr,none": 0.029225575892489596
|
| 287 |
},
|
| 288 |
"mmlu_high_school_computer_science": {
|
| 289 |
"alias": " - high_school_computer_science",
|
|
|
|
| 303 |
"mmlu_high_school_statistics": {
|
| 304 |
"alias": " - high_school_statistics",
|
| 305 |
"acc,none": 0.25925925925925924,
|
| 306 |
+
"acc_stderr,none": 0.02988691054762697
|
| 307 |
},
|
| 308 |
"mmlu_machine_learning": {
|
| 309 |
"alias": " - machine_learning",
|
|
|
|
| 314 |
"groups": {
|
| 315 |
"mmlu": {
|
| 316 |
"acc,none": 0.2525993448226748,
|
| 317 |
+
"acc_stderr,none": 0.0414928115354445,
|
| 318 |
"alias": "mmlu"
|
| 319 |
},
|
| 320 |
"mmlu_humanities": {
|
| 321 |
"alias": " - humanities",
|
| 322 |
"acc,none": 0.24017003188097769,
|
| 323 |
+
"acc_stderr,none": 0.029589768015471602
|
| 324 |
},
|
| 325 |
"mmlu_other": {
|
| 326 |
"alias": " - other",
|
| 327 |
+
"acc,none": 0.2568393949147087,
|
| 328 |
+
"acc_stderr,none": 0.05352976052385703
|
| 329 |
},
|
| 330 |
"mmlu_social_sciences": {
|
| 331 |
"alias": " - social_sciences",
|
| 332 |
+
"acc,none": 0.2645433864153396,
|
| 333 |
+
"acc_stderr,none": 0.035566784463720184
|
| 334 |
},
|
| 335 |
"mmlu_stem": {
|
| 336 |
"alias": " - stem",
|
| 337 |
"acc,none": 0.25531240088804313,
|
| 338 |
+
"acc_stderr,none": 0.04639919683915553
|
| 339 |
}
|
| 340 |
},
|
| 341 |
"configs": {
|
|
|
|
| 2590 |
"bootstrap_iters": 100000,
|
| 2591 |
"gen_kwargs": null
|
| 2592 |
},
|
| 2593 |
+
"git_hash": "71d574c"
|
| 2594 |
}
|
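A note on the per-subject numbers filled in above: each task-level acc_stderr behaves like the standard error of a mean of 0/1 scores, sqrt(p*(1-p)/(n-1)). A minimal sketch of that check, assuming the MMLU global_facts test split has 100 questions (an assumption, not stated in this diff); the group-level stderrs such as the 0.0414928… for "mmlu" come from the harness's own pooling across subtasks and are not reproduced here.

```python
# Sketch: recompute a task-level acc_stderr from acc and question count.
# n=100 for mmlu_global_facts is an assumption about the MMLU split size.
import math

def acc_stderr(p: float, n: int) -> float:
    # Standard error of the mean of n Bernoulli(0/1) scores, sample variance (n-1).
    return math.sqrt(p * (1 - p) / (n - 1))

print(acc_stderr(0.2, 100))  # ~0.040201512610368445, as reported for mmlu_global_facts
```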
lm-eval-output/RWKV/rwkv-5-world-1b5/mmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7654ebd2b0ccf4031968f494bb4c414fddc31e020ba53fc9a2f3c895f313f399
|
| 3 |
+
size 92883
|
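The taskrun.log entries in this commit are Git LFS pointer files: a version line, an `oid sha256:<hash>` line, and a `size <bytes>` line, exactly as shown in the hunk above. A minimal sketch of how such a pointer is derived from a local file (the file path is illustrative, not taken from this repository):

```python
# Sketch: build a Git LFS pointer (the three-line format seen in taskrun.log)
# for a local file. Path is a hypothetical example.
import hashlib
from pathlib import Path

def lfs_pointer(path: str) -> str:
    data = Path(path).read_bytes()
    digest = hashlib.sha256(data).hexdigest()
    return (
        "version https://git-lfs.github.com/spec/v1\n"
        f"oid sha256:{digest}\n"
        f"size {len(data)}\n"
    )

# print(lfs_pointer("taskrun.log"))
```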
lm-eval-output/RWKV/rwkv-5-world-1b5/nq_open/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
ADDED
|
@@ -0,0 +1,80 @@
|
|
| 1 |
+
{
|
| 2 |
+
"results": {
|
| 3 |
+
"nq_open": {
|
| 4 |
+
"exact_match,remove_whitespace": 0.002493074792243767,
|
| 5 |
+
"exact_match_stderr,remove_whitespace": 0.0008301033613701483,
|
| 6 |
+
"alias": "nq_open"
|
| 7 |
+
}
|
| 8 |
+
},
|
| 9 |
+
"configs": {
|
| 10 |
+
"nq_open": {
|
| 11 |
+
"task": "nq_open",
|
| 12 |
+
"dataset_path": "nq_open",
|
| 13 |
+
"training_split": "train",
|
| 14 |
+
"validation_split": "validation",
|
| 15 |
+
"doc_to_text": "Q: {{question}}?\nA:",
|
| 16 |
+
"doc_to_target": "{{answer}}",
|
| 17 |
+
"description": "Answer these questions:\n\n",
|
| 18 |
+
"target_delimiter": " ",
|
| 19 |
+
"fewshot_delimiter": "\n",
|
| 20 |
+
"metric_list": [
|
| 21 |
+
{
|
| 22 |
+
"metric": "exact_match",
|
| 23 |
+
"aggregation": "mean",
|
| 24 |
+
"higher_is_better": true,
|
| 25 |
+
"ignore_case": true,
|
| 26 |
+
"ignore_punctuation": true,
|
| 27 |
+
"regexes_to_ignore": [
|
| 28 |
+
"\\b(?:The |the |An |A |The |a |an )"
|
| 29 |
+
]
|
| 30 |
+
}
|
| 31 |
+
],
|
| 32 |
+
"output_type": "generate_until",
|
| 33 |
+
"generation_kwargs": {
|
| 34 |
+
"until": [
|
| 35 |
+
"\n",
|
| 36 |
+
".",
|
| 37 |
+
","
|
| 38 |
+
],
|
| 39 |
+
"do_sample": false,
|
| 40 |
+
"temperature": 0.0
|
| 41 |
+
},
|
| 42 |
+
"repeats": 1,
|
| 43 |
+
"filter_list": [
|
| 44 |
+
{
|
| 45 |
+
"name": "remove_whitespace",
|
| 46 |
+
"filter": [
|
| 47 |
+
{
|
| 48 |
+
"function": "remove_whitespace"
|
| 49 |
+
},
|
| 50 |
+
{
|
| 51 |
+
"function": "take_first"
|
| 52 |
+
}
|
| 53 |
+
]
|
| 54 |
+
}
|
| 55 |
+
],
|
| 56 |
+
"should_decontaminate": false,
|
| 57 |
+
"metadata": {
|
| 58 |
+
"version": 3.0
|
| 59 |
+
}
|
| 60 |
+
}
|
| 61 |
+
},
|
| 62 |
+
"versions": {
|
| 63 |
+
"nq_open": 3.0
|
| 64 |
+
},
|
| 65 |
+
"n-shot": {
|
| 66 |
+
"nq_open": 0
|
| 67 |
+
},
|
| 68 |
+
"config": {
|
| 69 |
+
"model": "hf",
|
| 70 |
+
"model_args": "pretrained=RWKV/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
|
| 71 |
+
"batch_size": "auto",
|
| 72 |
+
"batch_sizes": [],
|
| 73 |
+
"device": null,
|
| 74 |
+
"use_cache": null,
|
| 75 |
+
"limit": null,
|
| 76 |
+
"bootstrap_iters": 100000,
|
| 77 |
+
"gen_kwargs": null
|
| 78 |
+
},
|
| 79 |
+
"git_hash": "71d574c"
|
| 80 |
+
}
|
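The nq_open config added above scores generations with exact_match under ignore_case, ignore_punctuation, a regexes_to_ignore pattern that strips leading articles, and a remove_whitespace → take_first filter chain, with greedy decoding stopped at newline, period, or comma. A minimal sketch of the normalization and matching this implies; it approximates the behaviour, and the harness's own exact_match implementation may differ in detail:

```python
# Sketch of the scoring implied by the nq_open config above (not the harness's
# own code): strip leading articles, lowercase, drop punctuation, strip
# surrounding whitespace, then compare against any gold answer.
import re
import string

ARTICLES = re.compile(r"\b(?:The |the |An |A |a |an )")

def normalize(text: str) -> str:
    text = ARTICLES.sub("", text)          # regexes_to_ignore
    text = text.lower()                    # ignore_case
    text = text.translate(str.maketrans("", "", string.punctuation))  # ignore_punctuation
    return text.strip()                    # remove_whitespace filter (approximation)

def exact_match(prediction: str, answers: list) -> float:
    pred = normalize(prediction)
    return float(any(pred == normalize(a) for a in answers))

print(exact_match(" The Eiffel Tower", ["Eiffel Tower"]))  # 1.0 (hypothetical example)
```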
lm-eval-output/RWKV/rwkv-5-world-1b5/nq_open/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
ADDED
|
@@ -0,0 +1,3 @@
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:6436ac850841cd5d88b4f9da84e0c1264f204c37543b83f97969e6be73ef3a20
|
| 3 |
+
size 92622
|
lm-eval-output/RWKV/rwkv-5-world-1b5/openbookqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
|
@@ -1,10 +1,10 @@
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"openbookqa": {
|
| 4 |
-
"acc,none": 0.
|
| 5 |
-
"acc_stderr,none": 0.
|
| 6 |
-
"acc_norm,none": 0.
|
| 7 |
-
"acc_norm_stderr,none": 0.
|
| 8 |
"alias": "openbookqa"
|
| 9 |
}
|
| 10 |
},
|
|
@@ -62,5 +62,5 @@
|
|
| 62 |
"bootstrap_iters": 100000,
|
| 63 |
"gen_kwargs": null
|
| 64 |
},
|
| 65 |
-
"git_hash": "
|
| 66 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"openbookqa": {
|
| 4 |
+
"acc,none": 0.252,
|
| 5 |
+
"acc_stderr,none": 0.019435727282249522,
|
| 6 |
+
"acc_norm,none": 0.356,
|
| 7 |
+
"acc_norm_stderr,none": 0.02143471235607265,
|
| 8 |
"alias": "openbookqa"
|
| 9 |
}
|
| 10 |
},
|
|
|
|
| 62 |
"bootstrap_iters": 100000,
|
| 63 |
"gen_kwargs": null
|
| 64 |
},
|
| 65 |
+
"git_hash": "71d574c"
|
| 66 |
}
|
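The openbookqa rows filled in above report both acc and acc_norm. As commonly implemented in the harness, acc picks the answer choice with the highest summed log-likelihood, while acc_norm divides that log-likelihood by the byte length of the choice before comparing, which is why the two can diverge (0.252 vs 0.356 here). A minimal sketch with hypothetical per-choice log-likelihoods:

```python
# Sketch of acc vs acc_norm selection. The log-likelihoods below are
# hypothetical inputs, only meant to show how normalization can flip the pick.
def pick(loglikelihoods, choices, normalize=False):
    scores = [
        ll / len(c.encode("utf-8")) if normalize else ll
        for ll, c in zip(loglikelihoods, choices)
    ]
    return max(range(len(choices)), key=scores.__getitem__)

choices = ["a red rose", "sand"]
lls = [-9.1, -7.8]                  # hypothetical per-choice log-likelihoods
print(pick(lls, choices))           # acc-style pick -> index 1
print(pick(lls, choices, True))     # acc_norm-style (length-normalized) pick -> index 0
```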
lm-eval-output/RWKV/rwkv-5-world-1b5/openbookqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7504bce14c15206682399a76639b48c2d46823d031d22b639bb8578225302a35
|
| 3 |
+
size 33448
|
lm-eval-output/RWKV/rwkv-5-world-1b5/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
|
@@ -1,38 +1,38 @@
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"pawsx": {
|
| 4 |
-
"acc,none": 0.
|
| 5 |
-
"acc_stderr,none": 0.
|
| 6 |
"alias": "pawsx"
|
| 7 |
},
|
| 8 |
"paws_de": {
|
| 9 |
-
"acc,none": 0.
|
| 10 |
-
"acc_stderr,none": 0.
|
| 11 |
"alias": " - paws_de"
|
| 12 |
},
|
| 13 |
"paws_en": {
|
| 14 |
-
"acc,none": 0.
|
| 15 |
-
"acc_stderr,none": 0.
|
| 16 |
"alias": " - paws_en"
|
| 17 |
},
|
| 18 |
"paws_es": {
|
| 19 |
"acc,none": 0.533,
|
| 20 |
-
"acc_stderr,none": 0.
|
| 21 |
"alias": " - paws_es"
|
| 22 |
},
|
| 23 |
"paws_fr": {
|
| 24 |
"acc,none": 0.5485,
|
| 25 |
-
"acc_stderr,none": 0.
|
| 26 |
"alias": " - paws_fr"
|
| 27 |
},
|
| 28 |
"paws_ja": {
|
| 29 |
"acc,none": 0.557,
|
| 30 |
-
"acc_stderr,none": 0.
|
| 31 |
"alias": " - paws_ja"
|
| 32 |
},
|
| 33 |
"paws_ko": {
|
| 34 |
-
"acc,none": 0.
|
| 35 |
-
"acc_stderr,none": 0.
|
| 36 |
"alias": " - paws_ko"
|
| 37 |
},
|
| 38 |
"paws_zh": {
|
|
@@ -43,8 +43,8 @@
|
|
| 43 |
},
|
| 44 |
"groups": {
|
| 45 |
"pawsx": {
|
| 46 |
-
"acc,none": 0.
|
| 47 |
-
"acc_stderr,none": 0.
|
| 48 |
"alias": "pawsx"
|
| 49 |
}
|
| 50 |
},
|
|
@@ -279,5 +279,5 @@
|
|
| 279 |
"bootstrap_iters": 100000,
|
| 280 |
"gen_kwargs": null
|
| 281 |
},
|
| 282 |
-
"git_hash": "
|
| 283 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"pawsx": {
|
| 4 |
+
"acc,none": 0.5192857142857144,
|
| 5 |
+
"acc_stderr,none": 0.029939594331147804,
|
| 6 |
"alias": "pawsx"
|
| 7 |
},
|
| 8 |
"paws_de": {
|
| 9 |
+
"acc,none": 0.4845,
|
| 10 |
+
"acc_stderr,none": 0.011177761232603322,
|
| 11 |
"alias": " - paws_de"
|
| 12 |
},
|
| 13 |
"paws_en": {
|
| 14 |
+
"acc,none": 0.456,
|
| 15 |
+
"acc_stderr,none": 0.011139750761283311,
|
| 16 |
"alias": " - paws_en"
|
| 17 |
},
|
| 18 |
"paws_es": {
|
| 19 |
"acc,none": 0.533,
|
| 20 |
+
"acc_stderr,none": 0.011158752568250675,
|
| 21 |
"alias": " - paws_es"
|
| 22 |
},
|
| 23 |
"paws_fr": {
|
| 24 |
"acc,none": 0.5485,
|
| 25 |
+
"acc_stderr,none": 0.011130400617630765,
|
| 26 |
"alias": " - paws_fr"
|
| 27 |
},
|
| 28 |
"paws_ja": {
|
| 29 |
"acc,none": 0.557,
|
| 30 |
+
"acc_stderr,none": 0.011110230358066709,
|
| 31 |
"alias": " - paws_ja"
|
| 32 |
},
|
| 33 |
"paws_ko": {
|
| 34 |
+
"acc,none": 0.52,
|
| 35 |
+
"acc_stderr,none": 0.011174185930778305,
|
| 36 |
"alias": " - paws_ko"
|
| 37 |
},
|
| 38 |
"paws_zh": {
|
|
|
|
| 43 |
},
|
| 44 |
"groups": {
|
| 45 |
"pawsx": {
|
| 46 |
+
"acc,none": 0.5192857142857144,
|
| 47 |
+
"acc_stderr,none": 0.029939594331147804,
|
| 48 |
"alias": "pawsx"
|
| 49 |
}
|
| 50 |
},
|
|
|
|
| 279 |
"bootstrap_iters": 100000,
|
| 280 |
"gen_kwargs": null
|
| 281 |
},
|
| 282 |
+
"git_hash": "71d574c"
|
| 283 |
}
|
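The pawsx group accuracy filled in above (0.5192857…) is consistent with a plain mean of the per-language paws_* accuracies in the same file; the seven PAWS-X test sets are equally sized, so an unweighted and a size-weighted mean agree. A minimal sketch of that check (the file path is illustrative):

```python
# Sketch: verify the "pawsx" group accuracy against the mean of the
# per-language paws_* accuracies stored in the same results.json.
import json

with open("results.json") as f:
    res = json.load(f)["results"]

langs = [k for k in res if k.startswith("paws_")]
mean_acc = sum(res[k]["acc,none"] for k in langs) / len(langs)
print(mean_acc, res["pawsx"]["acc,none"])  # expected to agree (~0.5193)
```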
lm-eval-output/RWKV/rwkv-5-world-1b5/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:bde2e14f93bd6a756d63648e64afb7bb80dfc03eb97f3fb24caa60059a7f7f5a
|
| 3 |
+
size 36873
|
lm-eval-output/RWKV/rwkv-5-world-1b5/piqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
|
@@ -1,10 +1,10 @@
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"piqa": {
|
| 4 |
-
"acc,none": 0.
|
| 5 |
-
"acc_stderr,none": 0.
|
| 6 |
-
"acc_norm,none": 0.
|
| 7 |
-
"acc_norm_stderr,none": 0.
|
| 8 |
"alias": "piqa"
|
| 9 |
}
|
| 10 |
},
|
|
@@ -60,5 +60,5 @@
|
|
| 60 |
"bootstrap_iters": 100000,
|
| 61 |
"gen_kwargs": null
|
| 62 |
},
|
| 63 |
-
"git_hash": "
|
| 64 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"piqa": {
|
| 4 |
+
"acc,none": 0.7143634385201306,
|
| 5 |
+
"acc_stderr,none": 0.010539303948661921,
|
| 6 |
+
"acc_norm,none": 0.7149075081610446,
|
| 7 |
+
"acc_norm_stderr,none": 0.010533270588738932,
|
| 8 |
"alias": "piqa"
|
| 9 |
}
|
| 10 |
},
|
|
|
|
| 60 |
"bootstrap_iters": 100000,
|
| 61 |
"gen_kwargs": null
|
| 62 |
},
|
| 63 |
+
"git_hash": "71d574c"
|
| 64 |
}
|
lm-eval-output/RWKV/rwkv-5-world-1b5/piqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:613b1d76e81bc5a07dd2a40ab29c31d22e8a16117af341b8bee92968e4e9cc08
|
| 3 |
+
size 33565
|
lm-eval-output/RWKV/rwkv-5-world-1b5/pythia/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
|
@@ -1,64 +1,64 @@
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"pythia": {
|
| 4 |
-
"acc,none": 0.
|
| 5 |
-
"acc_stderr,none": 0.
|
| 6 |
-
"acc_norm,none": 0.
|
| 7 |
-
"acc_norm_stderr,none": 0.
|
| 8 |
"word_perplexity,none": 14.373441237489386,
|
| 9 |
"word_perplexity_stderr,none": "N/A",
|
| 10 |
"byte_perplexity,none": 1.646150916185073,
|
| 11 |
"byte_perplexity_stderr,none": "N/A",
|
| 12 |
"bits_per_byte,none": 0.719096605535433,
|
| 13 |
"bits_per_byte_stderr,none": "N/A",
|
| 14 |
-
"perplexity,none": 5.
|
| 15 |
-
"perplexity_stderr,none": 0.
|
| 16 |
"alias": "pythia"
|
| 17 |
},
|
| 18 |
"ai2_arc": {
|
| 19 |
-
"acc,none": 0.
|
| 20 |
-
"acc_stderr,none": 0.
|
| 21 |
-
"acc_norm,none": 0.
|
| 22 |
-
"acc_norm_stderr,none": 0.
|
| 23 |
"alias": " - ai2_arc"
|
| 24 |
},
|
| 25 |
"arc_challenge": {
|
| 26 |
"acc,none": 0.28668941979522183,
|
| 27 |
-
"acc_stderr,none": 0.
|
| 28 |
"acc_norm,none": 0.3302047781569966,
|
| 29 |
-
"acc_norm_stderr,none": 0.
|
| 30 |
"alias": " - arc_challenge"
|
| 31 |
},
|
| 32 |
"arc_easy": {
|
| 33 |
-
"acc,none": 0.
|
| 34 |
-
"acc_stderr,none": 0.
|
| 35 |
-
"acc_norm,none": 0.
|
| 36 |
-
"acc_norm_stderr,none": 0.
|
| 37 |
"alias": " - arc_easy"
|
| 38 |
},
|
| 39 |
"blimp": {
|
| 40 |
-
"acc,none": 0.
|
| 41 |
-
"acc_stderr,none": 0.
|
| 42 |
"alias": " - blimp"
|
| 43 |
},
|
| 44 |
"blimp_adjunct_island": {
|
| 45 |
"acc,none": 0.901,
|
| 46 |
-
"acc_stderr,none": 0.
|
| 47 |
"alias": " - blimp_adjunct_island"
|
| 48 |
},
|
| 49 |
"blimp_anaphor_gender_agreement": {
|
| 50 |
"acc,none": 0.992,
|
| 51 |
-
"acc_stderr,none": 0.
|
| 52 |
"alias": " - blimp_anaphor_gender_agreement"
|
| 53 |
},
|
| 54 |
"blimp_anaphor_number_agreement": {
|
| 55 |
"acc,none": 0.995,
|
| 56 |
-
"acc_stderr,none": 0.
|
| 57 |
"alias": " - blimp_anaphor_number_agreement"
|
| 58 |
},
|
| 59 |
"blimp_animate_subject_passive": {
|
| 60 |
"acc,none": 0.804,
|
| 61 |
-
"acc_stderr,none": 0.
|
| 62 |
"alias": " - blimp_animate_subject_passive"
|
| 63 |
},
|
| 64 |
"blimp_animate_subject_trans": {
|
|
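The perplexity-style metrics carried unchanged through this pythia hunk are not independent: bits_per_byte is simply the base-2 logarithm of byte_perplexity. A minimal check using the values reported above:

```python
# Sketch: bits_per_byte = log2(byte_perplexity). Values copied from the
# results in this hunk.
import math

byte_perplexity = 1.646150916185073
print(math.log2(byte_perplexity))  # ~0.719096605535433, matching the reported bits_per_byte
```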
@@ -68,7 +68,7 @@
|
|
| 68 |
},
|
| 69 |
"blimp_causative": {
|
| 70 |
"acc,none": 0.781,
|
| 71 |
-
"acc_stderr,none": 0.
|
| 72 |
"alias": " - blimp_causative"
|
| 73 |
},
|
| 74 |
"blimp_complex_NP_island": {
|
|
@@ -78,127 +78,127 @@
|
|
| 78 |
},
|
| 79 |
"blimp_coordinate_structure_constraint_complex_left_branch": {
|
| 80 |
"acc,none": 0.744,
|
| 81 |
-
"acc_stderr,none": 0.
|
| 82 |
"alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
|
| 83 |
},
|
| 84 |
"blimp_coordinate_structure_constraint_object_extraction": {
|
| 85 |
"acc,none": 0.848,
|
| 86 |
-
"acc_stderr,none": 0.
|
| 87 |
"alias": " - blimp_coordinate_structure_constraint_object_extraction"
|
| 88 |
},
|
| 89 |
"blimp_determiner_noun_agreement_1": {
|
| 90 |
"acc,none": 0.998,
|
| 91 |
-
"acc_stderr,none": 0.
|
| 92 |
"alias": " - blimp_determiner_noun_agreement_1"
|
| 93 |
},
|
| 94 |
"blimp_determiner_noun_agreement_2": {
|
| 95 |
"acc,none": 0.991,
|
| 96 |
-
"acc_stderr,none": 0.
|
| 97 |
"alias": " - blimp_determiner_noun_agreement_2"
|
| 98 |
},
|
| 99 |
"blimp_determiner_noun_agreement_irregular_1": {
|
| 100 |
"acc,none": 0.961,
|
| 101 |
-
"acc_stderr,none": 0.
|
| 102 |
"alias": " - blimp_determiner_noun_agreement_irregular_1"
|
| 103 |
},
|
| 104 |
"blimp_determiner_noun_agreement_irregular_2": {
|
| 105 |
"acc,none": 0.956,
|
| 106 |
-
"acc_stderr,none": 0.
|
| 107 |
"alias": " - blimp_determiner_noun_agreement_irregular_2"
|
| 108 |
},
|
| 109 |
"blimp_determiner_noun_agreement_with_adj_2": {
|
| 110 |
"acc,none": 0.964,
|
| 111 |
-
"acc_stderr,none": 0.
|
| 112 |
"alias": " - blimp_determiner_noun_agreement_with_adj_2"
|
| 113 |
},
|
| 114 |
"blimp_determiner_noun_agreement_with_adj_irregular_1": {
|
| 115 |
"acc,none": 0.939,
|
| 116 |
-
"acc_stderr,none": 0.
|
| 117 |
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
|
| 118 |
},
|
| 119 |
"blimp_determiner_noun_agreement_with_adj_irregular_2": {
|
| 120 |
"acc,none": 0.921,
|
| 121 |
-
"acc_stderr,none": 0.
|
| 122 |
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
|
| 123 |
},
|
| 124 |
"blimp_determiner_noun_agreement_with_adjective_1": {
|
| 125 |
"acc,none": 0.981,
|
| 126 |
-
"acc_stderr,none": 0.
|
| 127 |
"alias": " - blimp_determiner_noun_agreement_with_adjective_1"
|
| 128 |
},
|
| 129 |
"blimp_distractor_agreement_relational_noun": {
|
| 130 |
"acc,none": 0.887,
|
| 131 |
-
"acc_stderr,none": 0.
|
| 132 |
"alias": " - blimp_distractor_agreement_relational_noun"
|
| 133 |
},
|
| 134 |
"blimp_distractor_agreement_relative_clause": {
|
| 135 |
-
"acc,none": 0.
|
| 136 |
-
"acc_stderr,none": 0.
|
| 137 |
"alias": " - blimp_distractor_agreement_relative_clause"
|
| 138 |
},
|
| 139 |
"blimp_drop_argument": {
|
| 140 |
"acc,none": 0.804,
|
| 141 |
-
"acc_stderr,none": 0.
|
| 142 |
"alias": " - blimp_drop_argument"
|
| 143 |
},
|
| 144 |
"blimp_ellipsis_n_bar_1": {
|
| 145 |
"acc,none": 0.86,
|
| 146 |
-
"acc_stderr,none": 0.
|
| 147 |
"alias": " - blimp_ellipsis_n_bar_1"
|
| 148 |
},
|
| 149 |
"blimp_ellipsis_n_bar_2": {
|
| 150 |
"acc,none": 0.885,
|
| 151 |
-
"acc_stderr,none": 0.
|
| 152 |
"alias": " - blimp_ellipsis_n_bar_2"
|
| 153 |
},
|
| 154 |
"blimp_existential_there_object_raising": {
|
| 155 |
"acc,none": 0.842,
|
| 156 |
-
"acc_stderr,none": 0.
|
| 157 |
"alias": " - blimp_existential_there_object_raising"
|
| 158 |
},
|
| 159 |
"blimp_existential_there_quantifiers_1": {
|
| 160 |
"acc,none": 0.99,
|
| 161 |
-
"acc_stderr,none": 0.
|
| 162 |
"alias": " - blimp_existential_there_quantifiers_1"
|
| 163 |
},
|
| 164 |
"blimp_existential_there_quantifiers_2": {
|
| 165 |
"acc,none": 0.274,
|
| 166 |
-
"acc_stderr,none": 0.
|
| 167 |
"alias": " - blimp_existential_there_quantifiers_2"
|
| 168 |
},
|
| 169 |
"blimp_existential_there_subject_raising": {
|
| 170 |
"acc,none": 0.931,
|
| 171 |
-
"acc_stderr,none": 0.
|
| 172 |
"alias": " - blimp_existential_there_subject_raising"
|
| 173 |
},
|
| 174 |
"blimp_expletive_it_object_raising": {
|
| 175 |
"acc,none": 0.827,
|
| 176 |
-
"acc_stderr,none": 0.
|
| 177 |
"alias": " - blimp_expletive_it_object_raising"
|
| 178 |
},
|
| 179 |
"blimp_inchoative": {
|
| 180 |
-
"acc,none": 0.
|
| 181 |
-
"acc_stderr,none": 0.
|
| 182 |
"alias": " - blimp_inchoative"
|
| 183 |
},
|
| 184 |
"blimp_intransitive": {
|
| 185 |
"acc,none": 0.858,
|
| 186 |
-
"acc_stderr,none": 0.
|
| 187 |
"alias": " - blimp_intransitive"
|
| 188 |
},
|
| 189 |
"blimp_irregular_past_participle_adjectives": {
|
| 190 |
"acc,none": 0.992,
|
| 191 |
-
"acc_stderr,none": 0.
|
| 192 |
"alias": " - blimp_irregular_past_participle_adjectives"
|
| 193 |
},
|
| 194 |
"blimp_irregular_past_participle_verbs": {
|
| 195 |
"acc,none": 0.915,
|
| 196 |
-
"acc_stderr,none": 0.
|
| 197 |
"alias": " - blimp_irregular_past_participle_verbs"
|
| 198 |
},
|
| 199 |
"blimp_irregular_plural_subject_verb_agreement_1": {
|
| 200 |
"acc,none": 0.934,
|
| 201 |
-
"acc_stderr,none": 0.
|
| 202 |
"alias": " - blimp_irregular_plural_subject_verb_agreement_1"
|
| 203 |
},
|
| 204 |
"blimp_irregular_plural_subject_verb_agreement_2": {
|
|
@@ -208,27 +208,27 @@
|
|
| 208 |
},
|
| 209 |
"blimp_left_branch_island_echo_question": {
|
| 210 |
"acc,none": 0.456,
|
| 211 |
-
"acc_stderr,none": 0.
|
| 212 |
"alias": " - blimp_left_branch_island_echo_question"
|
| 213 |
},
|
| 214 |
"blimp_left_branch_island_simple_question": {
|
| 215 |
"acc,none": 0.847,
|
| 216 |
-
"acc_stderr,none": 0.
|
| 217 |
"alias": " - blimp_left_branch_island_simple_question"
|
| 218 |
},
|
| 219 |
"blimp_matrix_question_npi_licensor_present": {
|
| 220 |
"acc,none": 0.708,
|
| 221 |
-
"acc_stderr,none": 0.
|
| 222 |
"alias": " - blimp_matrix_question_npi_licensor_present"
|
| 223 |
},
|
| 224 |
"blimp_npi_present_1": {
|
| 225 |
"acc,none": 0.57,
|
| 226 |
-
"acc_stderr,none": 0.
|
| 227 |
"alias": " - blimp_npi_present_1"
|
| 228 |
},
|
| 229 |
"blimp_npi_present_2": {
|
| 230 |
"acc,none": 0.662,
|
| 231 |
-
"acc_stderr,none": 0.
|
| 232 |
"alias": " - blimp_npi_present_2"
|
| 233 |
},
|
| 234 |
"blimp_only_npi_licensor_present": {
|
|
@@ -238,22 +238,22 @@
|
|
| 238 |
},
|
| 239 |
"blimp_only_npi_scope": {
|
| 240 |
"acc,none": 0.726,
|
| 241 |
-
"acc_stderr,none": 0.
|
| 242 |
"alias": " - blimp_only_npi_scope"
|
| 243 |
},
|
| 244 |
"blimp_passive_1": {
|
| 245 |
-
"acc,none": 0.
|
| 246 |
-
"acc_stderr,none": 0.
|
| 247 |
"alias": " - blimp_passive_1"
|
| 248 |
},
|
| 249 |
"blimp_passive_2": {
|
| 250 |
"acc,none": 0.909,
|
| 251 |
-
"acc_stderr,none": 0.
|
| 252 |
"alias": " - blimp_passive_2"
|
| 253 |
},
|
| 254 |
"blimp_principle_A_c_command": {
|
| 255 |
-
"acc,none": 0.
|
| 256 |
-
"acc_stderr,none": 0.
|
| 257 |
"alias": " - blimp_principle_A_c_command"
|
| 258 |
},
|
| 259 |
"blimp_principle_A_case_1": {
|
|
@@ -263,62 +263,62 @@
|
|
| 263 |
},
|
| 264 |
"blimp_principle_A_case_2": {
|
| 265 |
"acc,none": 0.965,
|
| 266 |
-
"acc_stderr,none": 0.
|
| 267 |
"alias": " - blimp_principle_A_case_2"
|
| 268 |
},
|
| 269 |
"blimp_principle_A_domain_1": {
|
| 270 |
"acc,none": 0.993,
|
| 271 |
-
"acc_stderr,none": 0.
|
| 272 |
"alias": " - blimp_principle_A_domain_1"
|
| 273 |
},
|
| 274 |
"blimp_principle_A_domain_2": {
|
| 275 |
"acc,none": 0.903,
|
| 276 |
-
"acc_stderr,none": 0.
|
| 277 |
"alias": " - blimp_principle_A_domain_2"
|
| 278 |
},
|
| 279 |
"blimp_principle_A_domain_3": {
|
| 280 |
"acc,none": 0.755,
|
| 281 |
-
"acc_stderr,none": 0.
|
| 282 |
"alias": " - blimp_principle_A_domain_3"
|
| 283 |
},
|
| 284 |
"blimp_principle_A_reconstruction": {
|
| 285 |
"acc,none": 0.469,
|
| 286 |
-
"acc_stderr,none": 0.
|
| 287 |
"alias": " - blimp_principle_A_reconstruction"
|
| 288 |
},
|
| 289 |
"blimp_regular_plural_subject_verb_agreement_1": {
|
| 290 |
"acc,none": 0.966,
|
| 291 |
-
"acc_stderr,none": 0.
|
| 292 |
"alias": " - blimp_regular_plural_subject_verb_agreement_1"
|
| 293 |
},
|
| 294 |
"blimp_regular_plural_subject_verb_agreement_2": {
|
| 295 |
"acc,none": 0.91,
|
| 296 |
-
"acc_stderr,none": 0.
|
| 297 |
"alias": " - blimp_regular_plural_subject_verb_agreement_2"
|
| 298 |
},
|
| 299 |
"blimp_sentential_negation_npi_licensor_present": {
|
| 300 |
"acc,none": 0.985,
|
| 301 |
-
"acc_stderr,none": 0.
|
| 302 |
"alias": " - blimp_sentential_negation_npi_licensor_present"
|
| 303 |
},
|
| 304 |
"blimp_sentential_negation_npi_scope": {
|
| 305 |
-
"acc,none": 0.
|
| 306 |
-
"acc_stderr,none": 0.
|
| 307 |
"alias": " - blimp_sentential_negation_npi_scope"
|
| 308 |
},
|
| 309 |
"blimp_sentential_subject_island": {
|
| 310 |
"acc,none": 0.45,
|
| 311 |
-
"acc_stderr,none": 0.
|
| 312 |
"alias": " - blimp_sentential_subject_island"
|
| 313 |
},
|
| 314 |
"blimp_superlative_quantifiers_1": {
|
| 315 |
"acc,none": 0.848,
|
| 316 |
-
"acc_stderr,none": 0.
|
| 317 |
"alias": " - blimp_superlative_quantifiers_1"
|
| 318 |
},
|
| 319 |
"blimp_superlative_quantifiers_2": {
|
| 320 |
"acc,none": 0.746,
|
| 321 |
-
"acc_stderr,none": 0.
|
| 322 |
"alias": " - blimp_superlative_quantifiers_2"
|
| 323 |
},
|
| 324 |
"blimp_tough_vs_raising_1": {
|
|
@@ -328,17 +328,17 @@
|
|
| 328 |
},
|
| 329 |
"blimp_tough_vs_raising_2": {
|
| 330 |
"acc,none": 0.879,
|
| 331 |
-
"acc_stderr,none": 0.
|
| 332 |
"alias": " - blimp_tough_vs_raising_2"
|
| 333 |
},
|
| 334 |
"blimp_transitive": {
|
| 335 |
"acc,none": 0.89,
|
| 336 |
-
"acc_stderr,none": 0.
|
| 337 |
"alias": " - blimp_transitive"
|
| 338 |
},
|
| 339 |
"blimp_wh_island": {
|
| 340 |
"acc,none": 0.759,
|
| 341 |
-
"acc_stderr,none": 0.
|
| 342 |
"alias": " - blimp_wh_island"
|
| 343 |
},
|
| 344 |
"blimp_wh_questions_object_gap": {
|
|
@@ -348,7 +348,7 @@
|
|
| 348 |
},
|
| 349 |
"blimp_wh_questions_subject_gap": {
|
| 350 |
"acc,none": 0.953,
|
| 351 |
-
"acc_stderr,none": 0.
|
| 352 |
"alias": " - blimp_wh_questions_subject_gap"
|
| 353 |
},
|
| 354 |
"blimp_wh_questions_subject_gap_long_distance": {
|
|
@@ -358,17 +358,17 @@
|
|
| 358 |
},
|
| 359 |
"blimp_wh_vs_that_no_gap": {
|
| 360 |
"acc,none": 0.977,
|
| 361 |
-
"acc_stderr,none": 0.
|
| 362 |
"alias": " - blimp_wh_vs_that_no_gap"
|
| 363 |
},
|
| 364 |
"blimp_wh_vs_that_no_gap_long_distance": {
|
| 365 |
-
"acc,none": 0.
|
| 366 |
-
"acc_stderr,none": 0.
|
| 367 |
"alias": " - blimp_wh_vs_that_no_gap_long_distance"
|
| 368 |
},
|
| 369 |
"blimp_wh_vs_that_with_gap": {
|
| 370 |
"acc,none": 0.466,
|
| 371 |
-
"acc_stderr,none": 0.
|
| 372 |
"alias": " - blimp_wh_vs_that_with_gap"
|
| 373 |
},
|
| 374 |
"blimp_wh_vs_that_with_gap_long_distance": {
|
|
@@ -377,28 +377,28 @@
|
|
| 377 |
"alias": " - blimp_wh_vs_that_with_gap_long_distance"
|
| 378 |
},
|
| 379 |
"lambada_openai": {
|
| 380 |
-
"perplexity,none": 5.
|
| 381 |
-
"perplexity_stderr,none": 0.
|
| 382 |
"acc,none": 0.6568988938482437,
|
| 383 |
-
"acc_stderr,none": 0.
|
| 384 |
"alias": " - lambada_openai"
|
| 385 |
},
|
| 386 |
"logiqa": {
|
| 387 |
"acc,none": 0.2457757296466974,
|
| 388 |
-
"acc_stderr,none": 0.
|
| 389 |
"acc_norm,none": 0.29493087557603687,
|
| 390 |
-
"acc_norm_stderr,none": 0.
|
| 391 |
"alias": " - logiqa"
|
| 392 |
},
|
| 393 |
"mmlu": {
|
| 394 |
"acc,none": 0.2525993448226748,
|
| 395 |
-
"acc_stderr,none": 0.
|
| 396 |
"alias": " - mmlu"
|
| 397 |
},
|
| 398 |
"mmlu_humanities": {
|
| 399 |
"alias": " - humanities",
|
| 400 |
"acc,none": 0.24017003188097769,
|
| 401 |
-
"acc_stderr,none": 0.
|
| 402 |
},
|
| 403 |
"mmlu_formal_logic": {
|
| 404 |
"alias": " - formal_logic",
|
|
@@ -408,12 +408,12 @@
|
|
| 408 |
"mmlu_high_school_european_history": {
|
| 409 |
"alias": " - high_school_european_history",
|
| 410 |
"acc,none": 0.23030303030303031,
|
| 411 |
-
"acc_stderr,none": 0.
|
| 412 |
},
|
| 413 |
"mmlu_high_school_us_history": {
|
| 414 |
"alias": " - high_school_us_history",
|
| 415 |
"acc,none": 0.27941176470588236,
|
| 416 |
-
"acc_stderr,none": 0.
|
| 417 |
},
|
| 418 |
"mmlu_high_school_world_history": {
|
| 419 |
"alias": " - high_school_world_history",
|
|
@@ -438,17 +438,17 @@
|
|
| 438 |
"mmlu_moral_disputes": {
|
| 439 |
"alias": " - moral_disputes",
|
| 440 |
"acc,none": 0.21098265895953758,
|
| 441 |
-
"acc_stderr,none": 0.
|
| 442 |
},
|
| 443 |
"mmlu_moral_scenarios": {
|
| 444 |
"alias": " - moral_scenarios",
|
| 445 |
"acc,none": 0.2346368715083799,
|
| 446 |
-
"acc_stderr,none": 0.
|
| 447 |
},
|
| 448 |
"mmlu_philosophy": {
|
| 449 |
"alias": " - philosophy",
|
| 450 |
"acc,none": 0.2540192926045016,
|
| 451 |
-
"acc_stderr,none": 0.
|
| 452 |
},
|
| 453 |
"mmlu_prehistory": {
|
| 454 |
"alias": " - prehistory",
|
|
@@ -458,7 +458,7 @@
|
|
| 458 |
"mmlu_professional_law": {
|
| 459 |
"alias": " - professional_law",
|
| 460 |
"acc,none": 0.24967405475880053,
|
| 461 |
-
"acc_stderr,none": 0.
|
| 462 |
},
|
| 463 |
"mmlu_world_religions": {
|
| 464 |
"alias": " - world_religions",
|
|
@@ -467,8 +467,8 @@
|
|
| 467 |
},
|
| 468 |
"mmlu_other": {
|
| 469 |
"alias": " - other",
|
| 470 |
-
"acc,none": 0.
|
| 471 |
-
"acc_stderr,none": 0.
|
| 472 |
},
|
| 473 |
"mmlu_business_ethics": {
|
| 474 |
"alias": " - business_ethics",
|
|
@@ -478,17 +478,17 @@
|
|
| 478 |
"mmlu_clinical_knowledge": {
|
| 479 |
"alias": " - clinical_knowledge",
|
| 480 |
"acc,none": 0.32075471698113206,
|
| 481 |
-
"acc_stderr,none": 0.
|
| 482 |
},
|
| 483 |
"mmlu_college_medicine": {
|
| 484 |
"alias": " - college_medicine",
|
| 485 |
"acc,none": 0.3236994219653179,
|
| 486 |
-
"acc_stderr,none": 0.
|
| 487 |
},
|
| 488 |
"mmlu_global_facts": {
|
| 489 |
"alias": " - global_facts",
|
| 490 |
"acc,none": 0.2,
|
| 491 |
-
"acc_stderr,none": 0.
|
| 492 |
},
|
| 493 |
"mmlu_human_aging": {
|
| 494 |
"alias": " - human_aging",
|
|
@@ -503,32 +503,32 @@
|
|
| 503 |
"mmlu_marketing": {
|
| 504 |
"alias": " - marketing",
|
| 505 |
"acc,none": 0.2094017094017094,
|
| 506 |
-
"acc_stderr,none": 0.
|
| 507 |
},
|
| 508 |
"mmlu_medical_genetics": {
|
| 509 |
"alias": " - medical_genetics",
|
| 510 |
"acc,none": 0.32,
|
| 511 |
-
"acc_stderr,none": 0.
|
| 512 |
},
|
| 513 |
"mmlu_miscellaneous": {
|
| 514 |
"alias": " - miscellaneous",
|
| 515 |
"acc,none": 0.22349936143039592,
|
| 516 |
-
"acc_stderr,none": 0.
|
| 517 |
},
|
| 518 |
"mmlu_nutrition": {
|
| 519 |
"alias": " - nutrition",
|
| 520 |
"acc,none": 0.30718954248366015,
|
| 521 |
-
"acc_stderr,none": 0.
|
| 522 |
},
|
| 523 |
"mmlu_professional_accounting": {
|
| 524 |
"alias": " - professional_accounting",
|
| 525 |
"acc,none": 0.24822695035460993,
|
| 526 |
-
"acc_stderr,none": 0.
|
| 527 |
},
|
| 528 |
"mmlu_professional_medicine": {
|
| 529 |
"alias": " - professional_medicine",
|
| 530 |
"acc,none": 0.25735294117647056,
|
| 531 |
-
"acc_stderr,none": 0.
|
| 532 |
},
|
| 533 |
"mmlu_virology": {
|
| 534 |
"alias": " - virology",
|
|
@@ -537,18 +537,18 @@
|
|
| 537 |
},
|
| 538 |
"mmlu_social_sciences": {
|
| 539 |
"alias": " - social_sciences",
|
| 540 |
-
"acc,none": 0.
|
| 541 |
-
"acc_stderr,none": 0.
|
| 542 |
},
|
| 543 |
"mmlu_econometrics": {
|
| 544 |
"alias": " - econometrics",
|
| 545 |
"acc,none": 0.2719298245614035,
|
| 546 |
-
"acc_stderr,none": 0.
|
| 547 |
},
|
| 548 |
"mmlu_high_school_geography": {
|
| 549 |
"alias": " - high_school_geography",
|
| 550 |
"acc,none": 0.3333333333333333,
|
| 551 |
-
"acc_stderr,none": 0.
|
| 552 |
},
|
| 553 |
"mmlu_high_school_government_and_politics": {
|
| 554 |
"alias": " - high_school_government_and_politics",
|
|
@@ -558,12 +558,12 @@
|
|
| 558 |
"mmlu_high_school_macroeconomics": {
|
| 559 |
"alias": " - high_school_macroeconomics",
|
| 560 |
"acc,none": 0.258974358974359,
|
| 561 |
-
"acc_stderr,none": 0.
|
| 562 |
},
|
| 563 |
"mmlu_high_school_microeconomics": {
|
| 564 |
"alias": " - high_school_microeconomics",
|
| 565 |
"acc,none": 0.2605042016806723,
|
| 566 |
-
"acc_stderr,none": 0.
|
| 567 |
},
|
| 568 |
"mmlu_high_school_psychology": {
|
| 569 |
"alias": " - high_school_psychology",
|
|
@@ -583,12 +583,12 @@
|
|
| 583 |
"mmlu_public_relations": {
|
| 584 |
"alias": " - public_relations",
|
| 585 |
"acc,none": 0.24545454545454545,
|
| 586 |
-
"acc_stderr,none": 0.
|
| 587 |
},
|
| 588 |
"mmlu_security_studies": {
|
| 589 |
"alias": " - security_studies",
|
| 590 |
"acc,none": 0.2612244897959184,
|
| 591 |
-
"acc_stderr,none": 0.
|
| 592 |
},
|
| 593 |
"mmlu_sociology": {
|
| 594 |
"alias": " - sociology",
|
|
@@ -603,12 +603,12 @@
|
|
| 603 |
"mmlu_stem": {
|
| 604 |
"alias": " - stem",
|
| 605 |
"acc,none": 0.25531240088804313,
|
| 606 |
-
"acc_stderr,none": 0.
|
| 607 |
},
|
| 608 |
"mmlu_abstract_algebra": {
|
| 609 |
"alias": " - abstract_algebra",
|
| 610 |
"acc,none": 0.26,
|
| 611 |
-
"acc_stderr,none": 0.
|
| 612 |
},
|
| 613 |
"mmlu_anatomy": {
|
| 614 |
"alias": " - anatomy",
|
|
@@ -623,12 +623,12 @@
|
|
| 623 |
"mmlu_college_biology": {
|
| 624 |
"alias": " - college_biology",
|
| 625 |
"acc,none": 0.2777777777777778,
|
| 626 |
-
"acc_stderr,none": 0.
|
| 627 |
},
|
| 628 |
"mmlu_college_chemistry": {
|
| 629 |
"alias": " - college_chemistry",
|
| 630 |
"acc,none": 0.34,
|
| 631 |
-
"acc_stderr,none": 0.
|
| 632 |
},
|
| 633 |
"mmlu_college_computer_science": {
|
| 634 |
"alias": " - college_computer_science",
|
|
@@ -638,22 +638,22 @@
|
|
| 638 |
"mmlu_college_mathematics": {
|
| 639 |
"alias": " - college_mathematics",
|
| 640 |
"acc,none": 0.23,
|
| 641 |
-
"acc_stderr,none": 0.
|
| 642 |
},
|
| 643 |
"mmlu_college_physics": {
|
| 644 |
"alias": " - college_physics",
|
| 645 |
"acc,none": 0.2647058823529412,
|
| 646 |
-
"acc_stderr,none": 0.
|
| 647 |
},
|
| 648 |
"mmlu_computer_security": {
|
| 649 |
"alias": " - computer_security",
|
| 650 |
"acc,none": 0.22,
|
| 651 |
-
"acc_stderr,none": 0.
|
| 652 |
},
|
| 653 |
"mmlu_conceptual_physics": {
|
| 654 |
"alias": " - conceptual_physics",
|
| 655 |
"acc,none": 0.18723404255319148,
|
| 656 |
-
"acc_stderr,none": 0.
|
| 657 |
},
|
| 658 |
"mmlu_electrical_engineering": {
|
| 659 |
"alias": " - electrical_engineering",
|
|
@@ -663,17 +663,17 @@
|
|
| 663 |
"mmlu_elementary_mathematics": {
|
| 664 |
"alias": " - elementary_mathematics",
|
| 665 |
"acc,none": 0.2962962962962963,
|
| 666 |
-
"acc_stderr,none": 0.
|
| 667 |
},
|
| 668 |
"mmlu_high_school_biology": {
|
| 669 |
"alias": " - high_school_biology",
|
| 670 |
"acc,none": 0.2903225806451613,
|
| 671 |
-
"acc_stderr,none": 0.
|
| 672 |
},
|
| 673 |
"mmlu_high_school_chemistry": {
|
| 674 |
"alias": " - high_school_chemistry",
|
| 675 |
"acc,none": 0.22167487684729065,
|
| 676 |
-
"acc_stderr,none": 0.
|
| 677 |
},
|
| 678 |
"mmlu_high_school_computer_science": {
|
| 679 |
"alias": " - high_school_computer_science",
|
|
@@ -693,7 +693,7 @@
|
|
| 693 |
"mmlu_high_school_statistics": {
|
| 694 |
"alias": " - high_school_statistics",
|
| 695 |
"acc,none": 0.25925925925925924,
|
| 696 |
-
"acc_stderr,none": 0.
|
| 697 |
},
|
| 698 |
"mmlu_machine_learning": {
|
| 699 |
"alias": " - machine_learning",
|
|
@@ -702,14 +702,14 @@
|
|
| 702 |
},
|
| 703 |
"piqa": {
|
| 704 |
"acc,none": 0.7110990206746464,
|
| 705 |
-
"acc_stderr,none": 0.
|
| 706 |
"acc_norm,none": 0.7132752992383025,
|
| 707 |
-
"acc_norm_stderr,none": 0.
|
| 708 |
"alias": " - piqa"
|
| 709 |
},
|
| 710 |
"sciq": {
|
| 711 |
"acc,none": 0.897,
|
| 712 |
-
"acc_stderr,none": 0.
|
| 713 |
"acc_norm,none": 0.853,
|
| 714 |
"acc_norm_stderr,none": 0.011203415395160333,
|
| 715 |
"alias": " - sciq"
|
|
@@ -725,7 +725,7 @@
|
|
| 725 |
},
|
| 726 |
"winogrande": {
|
| 727 |
"acc,none": 0.5911602209944752,
|
| 728 |
-
"acc_stderr,none": 0.
|
| 729 |
"alias": " - winogrande"
|
| 730 |
},
|
| 731 |
"wsc": {
|
|
@@ -736,56 +736,56 @@
|
|
| 736 |
},
|
| 737 |
"groups": {
|
| 738 |
"pythia": {
|
| 739 |
-
"acc,none": 0.
|
| 740 |
-
"acc_stderr,none": 0.
|
| 741 |
-
"acc_norm,none": 0.
|
| 742 |
-
"acc_norm_stderr,none": 0.
|
| 743 |
"word_perplexity,none": 14.373441237489386,
|
| 744 |
"word_perplexity_stderr,none": "N/A",
|
| 745 |
"byte_perplexity,none": 1.646150916185073,
|
| 746 |
"byte_perplexity_stderr,none": "N/A",
|
| 747 |
"bits_per_byte,none": 0.719096605535433,
|
| 748 |
"bits_per_byte_stderr,none": "N/A",
|
| 749 |
-
"perplexity,none": 5.
|
| 750 |
-
"perplexity_stderr,none": 0.
|
| 751 |
"alias": "pythia"
|
| 752 |
},
|
| 753 |
"ai2_arc": {
|
| 754 |
-
"acc,none": 0.
|
| 755 |
-
"acc_stderr,none": 0.
|
| 756 |
-
"acc_norm,none": 0.
|
| 757 |
-
"acc_norm_stderr,none": 0.
|
| 758 |
"alias": " - ai2_arc"
|
| 759 |
},
|
| 760 |
"blimp": {
|
| 761 |
-
"acc,none": 0.
|
| 762 |
-
"acc_stderr,none": 0.
|
| 763 |
"alias": " - blimp"
|
| 764 |
},
|
| 765 |
"mmlu": {
|
| 766 |
"acc,none": 0.2525993448226748,
|
| 767 |
-
"acc_stderr,none": 0.
|
| 768 |
"alias": " - mmlu"
|
| 769 |
},
|
| 770 |
"mmlu_humanities": {
|
| 771 |
"alias": " - humanities",
|
| 772 |
"acc,none": 0.24017003188097769,
|
| 773 |
-
"acc_stderr,none": 0.
|
| 774 |
},
|
| 775 |
"mmlu_other": {
|
| 776 |
"alias": " - other",
|
| 777 |
-
"acc,none": 0.
|
| 778 |
-
"acc_stderr,none": 0.
|
| 779 |
},
|
| 780 |
"mmlu_social_sciences": {
|
| 781 |
"alias": " - social_sciences",
|
| 782 |
-
"acc,none": 0.
|
| 783 |
-
"acc_stderr,none": 0.
|
| 784 |
},
|
| 785 |
"mmlu_stem": {
|
| 786 |
"alias": " - stem",
|
| 787 |
"acc,none": 0.25531240088804313,
|
| 788 |
-
"acc_stderr,none": 0.
|
| 789 |
}
|
| 790 |
},
|
| 791 |
"configs": {
|
|
@@ -5230,5 +5230,5 @@
|
|
| 5230 |
"bootstrap_iters": 100000,
|
| 5231 |
"gen_kwargs": null
|
| 5232 |
},
|
| 5233 |
-
"git_hash": "
|
| 5234 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"results": {
|
| 3 |
"pythia": {
|
| 4 |
+
"acc,none": 0.7164499959853446,
|
| 5 |
+
"acc_stderr,none": 0.1523603503492503,
|
| 6 |
+
"acc_norm,none": 0.4997890478277153,
|
| 7 |
+
"acc_norm_stderr,none": 0.008352942631330008,
|
| 8 |
"word_perplexity,none": 14.373441237489386,
|
| 9 |
"word_perplexity_stderr,none": "N/A",
|
| 10 |
"byte_perplexity,none": 1.646150916185073,
|
| 11 |
"byte_perplexity_stderr,none": "N/A",
|
| 12 |
"bits_per_byte,none": 0.719096605535433,
|
| 13 |
"bits_per_byte_stderr,none": "N/A",
|
| 14 |
+
"perplexity,none": 5.055622995636905,
|
| 15 |
+
"perplexity_stderr,none": 0.11862585441461414,
|
| 16 |
"alias": "pythia"
|
| 17 |
},
|
| 18 |
"ai2_arc": {
|
| 19 |
+
"acc,none": 0.5109921082299888,
|
| 20 |
+
"acc_stderr,none": 0.1060971540262165,
|
| 21 |
+
"acc_norm,none": 0.4926719278466742,
|
| 22 |
+
"acc_norm_stderr,none": 0.07727859037048158,
|
| 23 |
"alias": " - ai2_arc"
|
| 24 |
},
|
| 25 |
"arc_challenge": {
|
| 26 |
"acc,none": 0.28668941979522183,
|
| 27 |
+
"acc_stderr,none": 0.013214986329274772,
|
| 28 |
"acc_norm,none": 0.3302047781569966,
|
| 29 |
+
"acc_norm_stderr,none": 0.013743085603760434,
|
| 30 |
"alias": " - arc_challenge"
|
| 31 |
},
|
| 32 |
"arc_easy": {
|
| 33 |
+
"acc,none": 0.6216329966329966,
|
| 34 |
+
"acc_stderr,none": 0.009951575683331946,
|
| 35 |
+
"acc_norm,none": 0.5728114478114478,
|
| 36 |
+
"acc_norm_stderr,none": 0.010150415974210871,
|
| 37 |
"alias": " - arc_easy"
|
| 38 |
},
|
| 39 |
"blimp": {
|
| 40 |
+
"acc,none": 0.833686567164179,
|
| 41 |
+
"acc_stderr,none": 0.15664937058746153,
|
| 42 |
"alias": " - blimp"
|
| 43 |
},
|
| 44 |
"blimp_adjunct_island": {
|
| 45 |
"acc,none": 0.901,
|
| 46 |
+
"acc_stderr,none": 0.009449248027662744,
|
| 47 |
"alias": " - blimp_adjunct_island"
|
| 48 |
},
|
| 49 |
"blimp_anaphor_gender_agreement": {
|
| 50 |
"acc,none": 0.992,
|
| 51 |
+
"acc_stderr,none": 0.0028185003005045052,
|
| 52 |
"alias": " - blimp_anaphor_gender_agreement"
|
| 53 |
},
|
| 54 |
"blimp_anaphor_number_agreement": {
|
| 55 |
"acc,none": 0.995,
|
| 56 |
+
"acc_stderr,none": 0.002231586874844882,
|
| 57 |
"alias": " - blimp_anaphor_number_agreement"
|
| 58 |
},
|
| 59 |
"blimp_animate_subject_passive": {
|
| 60 |
"acc,none": 0.804,
|
| 61 |
+
"acc_stderr,none": 0.012559527926707365,
|
| 62 |
"alias": " - blimp_animate_subject_passive"
|
| 63 |
},
|
| 64 |
"blimp_animate_subject_trans": {
|
|
|
|
| 68 |
},
|
| 69 |
"blimp_causative": {
|
| 70 |
"acc,none": 0.781,
|
| 71 |
+
"acc_stderr,none": 0.013084731950262007,
|
| 72 |
"alias": " - blimp_causative"
|
| 73 |
},
|
| 74 |
"blimp_complex_NP_island": {
|
|
|
|
| 78 |
},
|
| 79 |
"blimp_coordinate_structure_constraint_complex_left_branch": {
|
| 80 |
"acc,none": 0.744,
|
| 81 |
+
"acc_stderr,none": 0.013807775152234187,
|
| 82 |
"alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
|
| 83 |
},
|
| 84 |
"blimp_coordinate_structure_constraint_object_extraction": {
|
| 85 |
"acc,none": 0.848,
|
| 86 |
+
"acc_stderr,none": 0.011358918303475279,
|
| 87 |
"alias": " - blimp_coordinate_structure_constraint_object_extraction"
|
| 88 |
},
|
| 89 |
"blimp_determiner_noun_agreement_1": {
|
| 90 |
"acc,none": 0.998,
|
| 91 |
+
"acc_stderr,none": 0.0014135055705578208,
|
| 92 |
"alias": " - blimp_determiner_noun_agreement_1"
|
| 93 |
},
|
| 94 |
"blimp_determiner_noun_agreement_2": {
|
| 95 |
"acc,none": 0.991,
|
| 96 |
+
"acc_stderr,none": 0.0029879638431426704,
|
| 97 |
"alias": " - blimp_determiner_noun_agreement_2"
|
| 98 |
},
|
| 99 |
"blimp_determiner_noun_agreement_irregular_1": {
|
| 100 |
"acc,none": 0.961,
|
| 101 |
+
"acc_stderr,none": 0.006125072776426103,
|
| 102 |
"alias": " - blimp_determiner_noun_agreement_irregular_1"
|
| 103 |
},
|
| 104 |
"blimp_determiner_noun_agreement_irregular_2": {
|
| 105 |
"acc,none": 0.956,
|
| 106 |
+
"acc_stderr,none": 0.0064889217984274205,
|
| 107 |
"alias": " - blimp_determiner_noun_agreement_irregular_2"
|
| 108 |
},
|
| 109 |
"blimp_determiner_noun_agreement_with_adj_2": {
|
| 110 |
"acc,none": 0.964,
|
| 111 |
+
"acc_stderr,none": 0.0058939578161655804,
|
| 112 |
"alias": " - blimp_determiner_noun_agreement_with_adj_2"
|
| 113 |
},
|
| 114 |
"blimp_determiner_noun_agreement_with_adj_irregular_1": {
|
| 115 |
"acc,none": 0.939,
|
| 116 |
+
"acc_stderr,none": 0.007572076091557422,
|
| 117 |
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
|
| 118 |
},
|
| 119 |
"blimp_determiner_noun_agreement_with_adj_irregular_2": {
|
| 120 |
"acc,none": 0.921,
|
| 121 |
+
"acc_stderr,none": 0.008534156773333464,
|
| 122 |
"alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
|
| 123 |
},
|
| 124 |
"blimp_determiner_noun_agreement_with_adjective_1": {
|
| 125 |
"acc,none": 0.981,
|
| 126 |
+
"acc_stderr,none": 0.004319451082910623,
|
| 127 |
"alias": " - blimp_determiner_noun_agreement_with_adjective_1"
|
| 128 |
},
|
| 129 |
"blimp_distractor_agreement_relational_noun": {
|
| 130 |
"acc,none": 0.887,
|
| 131 |
+
"acc_stderr,none": 0.010016552866696832,
|
| 132 |
"alias": " - blimp_distractor_agreement_relational_noun"
|
| 133 |
},
|
| 134 |
"blimp_distractor_agreement_relative_clause": {
|
| 135 |
+
"acc,none": 0.795,
|
| 136 |
+
"acc_stderr,none": 0.012772554096113123,
|
| 137 |
"alias": " - blimp_distractor_agreement_relative_clause"
|
| 138 |
},
|
| 139 |
"blimp_drop_argument": {
|
| 140 |
"acc,none": 0.804,
|
| 141 |
+
"acc_stderr,none": 0.012559527926707392,
|
| 142 |
"alias": " - blimp_drop_argument"
|
| 143 |
},
|
| 144 |
"blimp_ellipsis_n_bar_1": {
|
| 145 |
"acc,none": 0.86,
|
| 146 |
+
"acc_stderr,none": 0.010978183844357808,
|
| 147 |
"alias": " - blimp_ellipsis_n_bar_1"
|
| 148 |
},
|
| 149 |
"blimp_ellipsis_n_bar_2": {
|
| 150 |
"acc,none": 0.885,
|
| 151 |
+
"acc_stderr,none": 0.010093407594904636,
|
| 152 |
"alias": " - blimp_ellipsis_n_bar_2"
|
| 153 |
},
|
| 154 |
"blimp_existential_there_object_raising": {
|
| 155 |
"acc,none": 0.842,
|
| 156 |
+
"acc_stderr,none": 0.011539894677559562,
|
| 157 |
"alias": " - blimp_existential_there_object_raising"
|
| 158 |
},
|
| 159 |
"blimp_existential_there_quantifiers_1": {
|
| 160 |
"acc,none": 0.99,
|
| 161 |
+
"acc_stderr,none": 0.0031480009386767645,
|
| 162 |
"alias": " - blimp_existential_there_quantifiers_1"
|
| 163 |
},
|
| 164 |
"blimp_existential_there_quantifiers_2": {
|
| 165 |
"acc,none": 0.274,
|
| 166 |
+
"acc_stderr,none": 0.014111099288259588,
|
| 167 |
"alias": " - blimp_existential_there_quantifiers_2"
|
| 168 |
},
|
| 169 |
"blimp_existential_there_subject_raising": {
|
| 170 |
"acc,none": 0.931,
|
| 171 |
+
"acc_stderr,none": 0.008018934050315151,
|
| 172 |
"alias": " - blimp_existential_there_subject_raising"
|
| 173 |
},
|
| 174 |
"blimp_expletive_it_object_raising": {
|
| 175 |
"acc,none": 0.827,
|
| 176 |
+
"acc_stderr,none": 0.011967214137559924,
|
| 177 |
"alias": " - blimp_expletive_it_object_raising"
|
| 178 |
},
|
| 179 |
"blimp_inchoative": {
|
| 180 |
+
"acc,none": 0.699,
|
| 181 |
+
"acc_stderr,none": 0.014512395033543155,
|
| 182 |
"alias": " - blimp_inchoative"
|
| 183 |
},
|
| 184 |
"blimp_intransitive": {
|
| 185 |
"acc,none": 0.858,
|
| 186 |
+
"acc_stderr,none": 0.011043457699378222,
|
| 187 |
"alias": " - blimp_intransitive"
|
| 188 |
},
|
| 189 |
"blimp_irregular_past_participle_adjectives": {
|
| 190 |
"acc,none": 0.992,
|
| 191 |
+
"acc_stderr,none": 0.0028185003005045057,
|
| 192 |
"alias": " - blimp_irregular_past_participle_adjectives"
|
| 193 |
},
|
| 194 |
"blimp_irregular_past_participle_verbs": {
|
| 195 |
"acc,none": 0.915,
|
| 196 |
+
"acc_stderr,none": 0.008823426366942317,
|
| 197 |
"alias": " - blimp_irregular_past_participle_verbs"
|
| 198 |
},
|
| 199 |
"blimp_irregular_plural_subject_verb_agreement_1": {
|
| 200 |
"acc,none": 0.934,
|
| 201 |
+
"acc_stderr,none": 0.007855297938697587,
|
| 202 |
"alias": " - blimp_irregular_plural_subject_verb_agreement_1"
|
| 203 |
},
|
| 204 |
"blimp_irregular_plural_subject_verb_agreement_2": {
|
|
|
|
| 208 |
},
|
| 209 |
"blimp_left_branch_island_echo_question": {
|
| 210 |
"acc,none": 0.456,
|
| 211 |
+
"acc_stderr,none": 0.015757928553979172,
|
| 212 |
"alias": " - blimp_left_branch_island_echo_question"
|
| 213 |
},
|
| 214 |
"blimp_left_branch_island_simple_question": {
|
| 215 |
"acc,none": 0.847,
|
| 216 |
+
"acc_stderr,none": 0.011389500459665544,
|
| 217 |
"alias": " - blimp_left_branch_island_simple_question"
|
| 218 |
},
|
| 219 |
"blimp_matrix_question_npi_licensor_present": {
|
| 220 |
"acc,none": 0.708,
|
| 221 |
+
"acc_stderr,none": 0.01438551156347734,
|
| 222 |
"alias": " - blimp_matrix_question_npi_licensor_present"
|
| 223 |
},
|
| 224 |
"blimp_npi_present_1": {
|
| 225 |
"acc,none": 0.57,
|
| 226 |
+
"acc_stderr,none": 0.01566350361015528,
|
| 227 |
"alias": " - blimp_npi_present_1"
|
| 228 |
},
|
| 229 |
"blimp_npi_present_2": {
|
| 230 |
"acc,none": 0.662,
|
| 231 |
+
"acc_stderr,none": 0.014965960710224489,
|
| 232 |
"alias": " - blimp_npi_present_2"
|
| 233 |
},
|
| 234 |
"blimp_only_npi_licensor_present": {
|
|
|
|
| 238 |
},
|
| 239 |
"blimp_only_npi_scope": {
|
| 240 |
"acc,none": 0.726,
|
| 241 |
+
"acc_stderr,none": 0.01411109928825959,
|
| 242 |
"alias": " - blimp_only_npi_scope"
|
| 243 |
},
|
| 244 |
"blimp_passive_1": {
|
| 245 |
+
"acc,none": 0.9,
|
| 246 |
+
"acc_stderr,none": 0.00949157995752503,
|
| 247 |
"alias": " - blimp_passive_1"
|
| 248 |
},
|
| 249 |
"blimp_passive_2": {
|
| 250 |
"acc,none": 0.909,
|
| 251 |
+
"acc_stderr,none": 0.009099549538400227,
|
| 252 |
"alias": " - blimp_passive_2"
|
| 253 |
},
|
| 254 |
"blimp_principle_A_c_command": {
|
| 255 |
+
"acc,none": 0.84,
|
| 256 |
+
"acc_stderr,none": 0.011598902298689007,
|
| 257 |
"alias": " - blimp_principle_A_c_command"
|
| 258 |
},
|
| 259 |
"blimp_principle_A_case_1": {
|
|
|
|
| 263 |
},
|
| 264 |
"blimp_principle_A_case_2": {
|
| 265 |
"acc,none": 0.965,
|
| 266 |
+
"acc_stderr,none": 0.005814534272734945,
|
| 267 |
"alias": " - blimp_principle_A_case_2"
|
| 268 |
},
|
| 269 |
"blimp_principle_A_domain_1": {
|
| 270 |
"acc,none": 0.993,
|
| 271 |
+
"acc_stderr,none": 0.002637794146243779,
|
| 272 |
"alias": " - blimp_principle_A_domain_1"
|
| 273 |
},
|
| 274 |
"blimp_principle_A_domain_2": {
|
| 275 |
"acc,none": 0.903,
|
| 276 |
+
"acc_stderr,none": 0.00936368937324812,
|
| 277 |
"alias": " - blimp_principle_A_domain_2"
|
| 278 |
},
|
| 279 |
"blimp_principle_A_domain_3": {
|
| 280 |
"acc,none": 0.755,
|
| 281 |
+
"acc_stderr,none": 0.013607356839598116,
|
| 282 |
"alias": " - blimp_principle_A_domain_3"
|
| 283 |
},
|
| 284 |
"blimp_principle_A_reconstruction": {
|
| 285 |
"acc,none": 0.469,
|
| 286 |
+
"acc_stderr,none": 0.015788865959539003,
|
| 287 |
"alias": " - blimp_principle_A_reconstruction"
|
| 288 |
},
|
| 289 |
"blimp_regular_plural_subject_verb_agreement_1": {
|
| 290 |
"acc,none": 0.966,
|
| 291 |
+
"acc_stderr,none": 0.005733836139695446,
|
| 292 |
"alias": " - blimp_regular_plural_subject_verb_agreement_1"
|
| 293 |
},
|
| 294 |
"blimp_regular_plural_subject_verb_agreement_2": {
|
| 295 |
"acc,none": 0.91,
|
| 296 |
+
"acc_stderr,none": 0.009054390204866439,
|
| 297 |
"alias": " - blimp_regular_plural_subject_verb_agreement_2"
|
| 298 |
},
|
| 299 |
"blimp_sentential_negation_npi_licensor_present": {
|
| 300 |
"acc,none": 0.985,
|
| 301 |
+
"acc_stderr,none": 0.003845749574502989,
|
| 302 |
"alias": " - blimp_sentential_negation_npi_licensor_present"
|
| 303 |
},
|
| 304 |
"blimp_sentential_negation_npi_scope": {
|
| 305 |
+
"acc,none": 0.759,
|
| 306 |
+
"acc_stderr,none": 0.013531522534515448,
|
| 307 |
"alias": " - blimp_sentential_negation_npi_scope"
|
| 308 |
},
|
| 309 |
"blimp_sentential_subject_island": {
|
| 310 |
"acc,none": 0.45,
|
| 311 |
+
"acc_stderr,none": 0.01574000469338386,
|
| 312 |
"alias": " - blimp_sentential_subject_island"
|
| 313 |
},
|
| 314 |
"blimp_superlative_quantifiers_1": {
|
| 315 |
"acc,none": 0.848,
|
| 316 |
+
"acc_stderr,none": 0.011358918303475287,
|
| 317 |
"alias": " - blimp_superlative_quantifiers_1"
|
| 318 |
},
|
| 319 |
"blimp_superlative_quantifiers_2": {
|
| 320 |
"acc,none": 0.746,
|
| 321 |
+
"acc_stderr,none": 0.01377220656516854,
|
| 322 |
"alias": " - blimp_superlative_quantifiers_2"
|
| 323 |
},
|
| 324 |
"blimp_tough_vs_raising_1": {
|
|
|
|
| 328 |
},
|
| 329 |
"blimp_tough_vs_raising_2": {
|
| 330 |
"acc,none": 0.879,
|
| 331 |
+
"acc_stderr,none": 0.010318210380946092,
|
| 332 |
"alias": " - blimp_tough_vs_raising_2"
|
| 333 |
},
|
| 334 |
"blimp_transitive": {
|
| 335 |
"acc,none": 0.89,
|
| 336 |
+
"acc_stderr,none": 0.009899393819724435,
|
| 337 |
"alias": " - blimp_transitive"
|
| 338 |
},
|
| 339 |
"blimp_wh_island": {
|
| 340 |
"acc,none": 0.759,
|
| 341 |
+
"acc_stderr,none": 0.01353152253451543,
|
| 342 |
"alias": " - blimp_wh_island"
|
| 343 |
},
|
| 344 |
"blimp_wh_questions_object_gap": {
|
|
|
|
| 348 |
},
|
| 349 |
"blimp_wh_questions_subject_gap": {
|
| 350 |
"acc,none": 0.953,
|
| 351 |
+
"acc_stderr,none": 0.006695956678163041,
|
| 352 |
"alias": " - blimp_wh_questions_subject_gap"
|
| 353 |
},
|
| 354 |
"blimp_wh_questions_subject_gap_long_distance": {
|
|
|
|
| 358 |
},
|
| 359 |
"blimp_wh_vs_that_no_gap": {
|
| 360 |
"acc,none": 0.977,
|
| 361 |
+
"acc_stderr,none": 0.004742730594656804,
|
| 362 |
"alias": " - blimp_wh_vs_that_no_gap"
|
| 363 |
},
|
| 364 |
"blimp_wh_vs_that_no_gap_long_distance": {
|
| 365 |
+
"acc,none": 0.964,
|
| 366 |
+
"acc_stderr,none": 0.005893957816165585,
|
| 367 |
"alias": " - blimp_wh_vs_that_no_gap_long_distance"
|
| 368 |
},
|
| 369 |
"blimp_wh_vs_that_with_gap": {
|
| 370 |
"acc,none": 0.466,
|
| 371 |
+
"acc_stderr,none": 0.015782683329937625,
|
| 372 |
"alias": " - blimp_wh_vs_that_with_gap"
|
| 373 |
},
|
| 374 |
"blimp_wh_vs_that_with_gap_long_distance": {
|
|
|
|
| 377 |
"alias": " - blimp_wh_vs_that_with_gap_long_distance"
|
| 378 |
},
|
| 379 |
"lambada_openai": {
|
| 380 |
+
"perplexity,none": 5.055622995636905,
|
| 381 |
+
"perplexity_stderr,none": 0.11862585441461414,
|
| 382 |
"acc,none": 0.6568988938482437,
|
| 383 |
+
"acc_stderr,none": 0.006614124982461041,
|
| 384 |
"alias": " - lambada_openai"
|
| 385 |
},
|
| 386 |
"logiqa": {
|
| 387 |
"acc,none": 0.2457757296466974,
|
| 388 |
+
"acc_stderr,none": 0.016887410894296934,
|
| 389 |
"acc_norm,none": 0.29493087557603687,
|
| 390 |
+
"acc_norm_stderr,none": 0.017886249734104402,
|
| 391 |
"alias": " - logiqa"
|
| 392 |
},
|
| 393 |
"mmlu": {
|
| 394 |
"acc,none": 0.2525993448226748,
|
| 395 |
+
"acc_stderr,none": 0.0414928115354445,
|
| 396 |
"alias": " - mmlu"
|
| 397 |
},
|
| 398 |
"mmlu_humanities": {
|
| 399 |
"alias": " - humanities",
|
| 400 |
"acc,none": 0.24017003188097769,
|
| 401 |
+
"acc_stderr,none": 0.029589768015471602
|
| 402 |
},
|
| 403 |
"mmlu_formal_logic": {
|
| 404 |
"alias": " - formal_logic",
|
|
|
|
| 408 |
"mmlu_high_school_european_history": {
|
| 409 |
"alias": " - high_school_european_history",
|
| 410 |
"acc,none": 0.23030303030303031,
|
| 411 |
+
"acc_stderr,none": 0.032876667586034886
|
| 412 |
},
|
| 413 |
"mmlu_high_school_us_history": {
|
| 414 |
"alias": " - high_school_us_history",
|
| 415 |
"acc,none": 0.27941176470588236,
|
| 416 |
+
"acc_stderr,none": 0.03149328104507957
|
| 417 |
},
|
| 418 |
"mmlu_high_school_world_history": {
|
| 419 |
"alias": " - high_school_world_history",
|
|
|
|
| 438 |
"mmlu_moral_disputes": {
|
| 439 |
"alias": " - moral_disputes",
|
| 440 |
"acc,none": 0.21098265895953758,
|
| 441 |
+
"acc_stderr,none": 0.021966309947043128
|
| 442 |
},
|
| 443 |
"mmlu_moral_scenarios": {
|
| 444 |
"alias": " - moral_scenarios",
|
| 445 |
"acc,none": 0.2346368715083799,
|
| 446 |
+
"acc_stderr,none": 0.014173044098303653
|
| 447 |
},
|
| 448 |
"mmlu_philosophy": {
|
| 449 |
"alias": " - philosophy",
|
| 450 |
"acc,none": 0.2540192926045016,
|
| 451 |
+
"acc_stderr,none": 0.024723861504771693
|
| 452 |
},
|
| 453 |
"mmlu_prehistory": {
|
| 454 |
"alias": " - prehistory",
|
|
|
|
| 458 |
"mmlu_professional_law": {
|
| 459 |
"alias": " - professional_law",
|
| 460 |
"acc,none": 0.24967405475880053,
|
| 461 |
+
"acc_stderr,none": 0.011054538377832317
|
| 462 |
},
|
| 463 |
"mmlu_world_religions": {
|
| 464 |
"alias": " - world_religions",
|
|
|
|
| 467 |
},
|
| 468 |
"mmlu_other": {
|
| 469 |
"alias": " - other",
|
| 470 |
+
"acc,none": 0.2568393949147087,
|
| 471 |
+
"acc_stderr,none": 0.05352976052385703
|
| 472 |
},
|
| 473 |
"mmlu_business_ethics": {
|
| 474 |
"alias": " - business_ethics",
|
|
|
|
| 478 |
"mmlu_clinical_knowledge": {
|
| 479 |
"alias": " - clinical_knowledge",
|
| 480 |
"acc,none": 0.32075471698113206,
|
| 481 |
+
"acc_stderr,none": 0.02872750295788027
|
| 482 |
},
|
| 483 |
"mmlu_college_medicine": {
|
| 484 |
"alias": " - college_medicine",
|
| 485 |
"acc,none": 0.3236994219653179,
|
| 486 |
+
"acc_stderr,none": 0.0356760379963917
|
| 487 |
},
|
| 488 |
"mmlu_global_facts": {
|
| 489 |
"alias": " - global_facts",
|
| 490 |
"acc,none": 0.2,
|
| 491 |
+
"acc_stderr,none": 0.040201512610368445
|
| 492 |
},
|
| 493 |
"mmlu_human_aging": {
|
| 494 |
"alias": " - human_aging",
|
|
|
|
| 503 |
"mmlu_marketing": {
|
| 504 |
"alias": " - marketing",
|
| 505 |
"acc,none": 0.2094017094017094,
|
| 506 |
+
"acc_stderr,none": 0.026655699653922737
|
| 507 |
},
|
| 508 |
"mmlu_medical_genetics": {
|
| 509 |
"alias": " - medical_genetics",
|
| 510 |
"acc,none": 0.32,
|
| 511 |
+
"acc_stderr,none": 0.046882617226215034
|
| 512 |
},
|
| 513 |
"mmlu_miscellaneous": {
|
| 514 |
"alias": " - miscellaneous",
|
| 515 |
"acc,none": 0.22349936143039592,
|
| 516 |
+
"acc_stderr,none": 0.01489723522945071
|
| 517 |
},
|
| 518 |
"mmlu_nutrition": {
|
| 519 |
"alias": " - nutrition",
|
| 520 |
"acc,none": 0.30718954248366015,
|
| 521 |
+
"acc_stderr,none": 0.026415601914388995
|
| 522 |
},
|
| 523 |
"mmlu_professional_accounting": {
|
| 524 |
"alias": " - professional_accounting",
|
| 525 |
"acc,none": 0.24822695035460993,
|
| 526 |
+
"acc_stderr,none": 0.02577001564429038
|
| 527 |
},
|
| 528 |
"mmlu_professional_medicine": {
|
| 529 |
"alias": " - professional_medicine",
|
| 530 |
"acc,none": 0.25735294117647056,
|
| 531 |
+
"acc_stderr,none": 0.0265565194700415
|
| 532 |
},
|
| 533 |
"mmlu_virology": {
|
| 534 |
"alias": " - virology",
|
|
|
|
| 537 |
},
|
| 538 |
"mmlu_social_sciences": {
|
| 539 |
"alias": " - social_sciences",
|
| 540 |
+
"acc,none": 0.2645433864153396,
|
| 541 |
+
"acc_stderr,none": 0.035566784463720184
|
| 542 |
},
|
| 543 |
"mmlu_econometrics": {
|
| 544 |
"alias": " - econometrics",
|
| 545 |
"acc,none": 0.2719298245614035,
|
| 546 |
+
"acc_stderr,none": 0.041857744240220554
|
| 547 |
},
|
| 548 |
"mmlu_high_school_geography": {
|
| 549 |
"alias": " - high_school_geography",
|
| 550 |
"acc,none": 0.3333333333333333,
|
| 551 |
+
"acc_stderr,none": 0.03358618145732523
|
| 552 |
},
|
| 553 |
"mmlu_high_school_government_and_politics": {
|
| 554 |
"alias": " - high_school_government_and_politics",
|
|
|
|
| 558 |
"mmlu_high_school_macroeconomics": {
|
| 559 |
"alias": " - high_school_macroeconomics",
|
| 560 |
"acc,none": 0.258974358974359,
|
| 561 |
+
"acc_stderr,none": 0.02221110681006167
|
| 562 |
},
|
| 563 |
"mmlu_high_school_microeconomics": {
|
| 564 |
"alias": " - high_school_microeconomics",
|
| 565 |
"acc,none": 0.2605042016806723,
|
| 566 |
+
"acc_stderr,none": 0.028510251512341923
|
| 567 |
},
|
| 568 |
"mmlu_high_school_psychology": {
|
| 569 |
"alias": " - high_school_psychology",
|
|
|
|
| 583 |
"mmlu_public_relations": {
|
| 584 |
"alias": " - public_relations",
|
| 585 |
"acc,none": 0.24545454545454545,
|
| 586 |
+
"acc_stderr,none": 0.04122066502878285
|
| 587 |
},
|
| 588 |
"mmlu_security_studies": {
|
| 589 |
"alias": " - security_studies",
|
| 590 |
"acc,none": 0.2612244897959184,
|
| 591 |
+
"acc_stderr,none": 0.028123429335142783
|
| 592 |
},
|
| 593 |
"mmlu_sociology": {
|
| 594 |
"alias": " - sociology",
|
|
|
|
| 603 |
"mmlu_stem": {
|
| 604 |
"alias": " - stem",
|
| 605 |
"acc,none": 0.25531240088804313,
|
| 606 |
+
"acc_stderr,none": 0.04639919683915553
|
| 607 |
},
|
| 608 |
"mmlu_abstract_algebra": {
|
| 609 |
"alias": " - abstract_algebra",
|
| 610 |
"acc,none": 0.26,
|
| 611 |
+
"acc_stderr,none": 0.04408440022768078
|
| 612 |
},
|
| 613 |
"mmlu_anatomy": {
|
| 614 |
"alias": " - anatomy",
|
|
|
|
| 623 |
"mmlu_college_biology": {
|
| 624 |
"alias": " - college_biology",
|
| 625 |
"acc,none": 0.2777777777777778,
|
| 626 |
+
"acc_stderr,none": 0.03745554791462456
|
| 627 |
},
|
| 628 |
"mmlu_college_chemistry": {
|
| 629 |
"alias": " - college_chemistry",
|
| 630 |
"acc,none": 0.34,
|
| 631 |
+
"acc_stderr,none": 0.047609522856952365
|
| 632 |
},
|
| 633 |
"mmlu_college_computer_science": {
|
| 634 |
"alias": " - college_computer_science",
|
|
|
|
| 638 |
"mmlu_college_mathematics": {
|
| 639 |
"alias": " - college_mathematics",
|
| 640 |
"acc,none": 0.23,
|
| 641 |
+
"acc_stderr,none": 0.04229525846816505
|
| 642 |
},
|
| 643 |
"mmlu_college_physics": {
|
| 644 |
"alias": " - college_physics",
|
| 645 |
"acc,none": 0.2647058823529412,
|
| 646 |
+
"acc_stderr,none": 0.04389869956808779
|
| 647 |
},
|
| 648 |
"mmlu_computer_security": {
|
| 649 |
"alias": " - computer_security",
|
| 650 |
"acc,none": 0.22,
|
| 651 |
+
"acc_stderr,none": 0.04163331998932269
|
| 652 |
},
|
| 653 |
"mmlu_conceptual_physics": {
|
| 654 |
"alias": " - conceptual_physics",
|
| 655 |
"acc,none": 0.18723404255319148,
|
| 656 |
+
"acc_stderr,none": 0.025501588341883596
|
| 657 |
},
|
| 658 |
"mmlu_electrical_engineering": {
|
| 659 |
"alias": " - electrical_engineering",
|
|
|
|
| 663 |
"mmlu_elementary_mathematics": {
|
| 664 |
"alias": " - elementary_mathematics",
|
| 665 |
"acc,none": 0.2962962962962963,
|
| 666 |
+
"acc_stderr,none": 0.023517294335963286
|
| 667 |
},
|
| 668 |
"mmlu_high_school_biology": {
|
| 669 |
"alias": " - high_school_biology",
|
| 670 |
"acc,none": 0.2903225806451613,
|
| 671 |
+
"acc_stderr,none": 0.025822106119415898
|
| 672 |
},
|
| 673 |
"mmlu_high_school_chemistry": {
|
| 674 |
"alias": " - high_school_chemistry",
|
| 675 |
"acc,none": 0.22167487684729065,
|
| 676 |
+
"acc_stderr,none": 0.029225575892489596
|
| 677 |
},
|
| 678 |
"mmlu_high_school_computer_science": {
|
| 679 |
"alias": " - high_school_computer_science",
|
|
|
|
| 693 |
"mmlu_high_school_statistics": {
|
| 694 |
"alias": " - high_school_statistics",
|
| 695 |
"acc,none": 0.25925925925925924,
|
| 696 |
+
"acc_stderr,none": 0.02988691054762697
|
| 697 |
},
|
| 698 |
"mmlu_machine_learning": {
|
| 699 |
"alias": " - machine_learning",
|
|
|
|
| 702 |
},
|
| 703 |
"piqa": {
|
| 704 |
"acc,none": 0.7110990206746464,
|
| 705 |
+
"acc_stderr,none": 0.010575111841364898,
|
| 706 |
"acc_norm,none": 0.7132752992383025,
|
| 707 |
+
"acc_norm_stderr,none": 0.010551314503108068,
|
| 708 |
"alias": " - piqa"
|
| 709 |
},
|
| 710 |
"sciq": {
|
| 711 |
"acc,none": 0.897,
|
| 712 |
+
"acc_stderr,none": 0.009616833339695798,
|
| 713 |
"acc_norm,none": 0.853,
|
| 714 |
"acc_norm_stderr,none": 0.011203415395160333,
|
| 715 |
"alias": " - sciq"
|
|
|
|
| 725 |
},
|
| 726 |
"winogrande": {
|
| 727 |
"acc,none": 0.5911602209944752,
|
| 728 |
+
"acc_stderr,none": 0.013816954295135679,
|
| 729 |
"alias": " - winogrande"
|
| 730 |
},
|
| 731 |
"wsc": {
|
|
|
|
| 736 |
},
|
| 737 |
"groups": {
|
| 738 |
"pythia": {
|
| 739 |
+
"acc,none": 0.7164499959853446,
|
| 740 |
+
"acc_stderr,none": 0.1523603503492503,
|
| 741 |
+
"acc_norm,none": 0.4997890478277153,
|
| 742 |
+
"acc_norm_stderr,none": 0.008352942631330008,
|
| 743 |
"word_perplexity,none": 14.373441237489386,
|
| 744 |
"word_perplexity_stderr,none": "N/A",
|
| 745 |
"byte_perplexity,none": 1.646150916185073,
|
| 746 |
"byte_perplexity_stderr,none": "N/A",
|
| 747 |
"bits_per_byte,none": 0.719096605535433,
|
| 748 |
"bits_per_byte_stderr,none": "N/A",
|
| 749 |
+
"perplexity,none": 5.055622995636905,
|
| 750 |
+
"perplexity_stderr,none": 0.11862585441461414,
|
| 751 |
"alias": "pythia"
|
| 752 |
},
|
| 753 |
"ai2_arc": {
|
| 754 |
+
"acc,none": 0.5109921082299888,
|
| 755 |
+
"acc_stderr,none": 0.1060971540262165,
|
| 756 |
+
"acc_norm,none": 0.4926719278466742,
|
| 757 |
+
"acc_norm_stderr,none": 0.07727859037048158,
|
| 758 |
"alias": " - ai2_arc"
|
| 759 |
},
|
| 760 |
"blimp": {
|
| 761 |
+
"acc,none": 0.833686567164179,
|
| 762 |
+
"acc_stderr,none": 0.15664937058746153,
|
| 763 |
"alias": " - blimp"
|
| 764 |
},
|
| 765 |
"mmlu": {
|
| 766 |
"acc,none": 0.2525993448226748,
|
| 767 |
+
"acc_stderr,none": 0.0414928115354445,
|
| 768 |
"alias": " - mmlu"
|
| 769 |
},
|
| 770 |
"mmlu_humanities": {
|
| 771 |
"alias": " - humanities",
|
| 772 |
"acc,none": 0.24017003188097769,
|
| 773 |
+
"acc_stderr,none": 0.029589768015471602
|
| 774 |
},
|
| 775 |
"mmlu_other": {
|
| 776 |
"alias": " - other",
|
| 777 |
+
"acc,none": 0.2568393949147087,
|
| 778 |
+
"acc_stderr,none": 0.05352976052385703
|
| 779 |
},
|
| 780 |
"mmlu_social_sciences": {
|
| 781 |
"alias": " - social_sciences",
|
| 782 |
+
"acc,none": 0.2645433864153396,
|
| 783 |
+
"acc_stderr,none": 0.035566784463720184
|
| 784 |
},
|
| 785 |
"mmlu_stem": {
|
| 786 |
"alias": " - stem",
|
| 787 |
"acc,none": 0.25531240088804313,
|
| 788 |
+
"acc_stderr,none": 0.04639919683915553
|
| 789 |
}
|
| 790 |
},
|
| 791 |
"configs": {
|
|
|
|
| 5230 |
"bootstrap_iters": 100000,
|
| 5231 |
"gen_kwargs": null
|
| 5232 |
},
|
| 5233 |
+
"git_hash": "71d574c"
|
| 5234 |
}
|
lm-eval-output/RWKV/rwkv-5-world-1b5/pythia/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:ceae1375e3645f4e9c6ff53cafc2759350e8221dbf00e4e5cb68b1bb5a909a58
+ size 368147
lm-eval-output/RWKV/rwkv-5-world-1b5/record/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
ADDED
@@ -0,0 +1,67 @@
+ {
+ "results": {
+ "record": {
+ "f1,none": 0.26163523828089236,
+ "f1_stderr,none": 0.00436443954071801,
+ "em,none": 0.254,
+ "em_stderr,none": 0.00435319365862602,
+ "alias": "record"
+ }
+ },
+ "configs": {
+ "record": {
+ "task": "record",
+ "group": [
+ "super-glue-lm-eval-v1"
+ ],
+ "dataset_path": "super_glue",
+ "dataset_name": "record",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "def doc_to_text(doc):\n initial_text, *highlights = doc[\"passage\"].strip().split(\"\\n@highlight\\n\")\n text = initial_text + \"\\n\\n\"\n for highlight in highlights:\n text += f\" - {highlight}.\\n\"\n return text\n",
+ "doc_to_target": "{{answers}}",
+ "doc_to_choice": "{{entities}}",
+ "process_results": "def process_results(doc, results):\n # ReCoRD's evaluation is actually deceptively simple:\n # - Pick the maximum likelihood prediction entity\n # - Evaluate the accuracy and token F1 PER EXAMPLE\n # - Average over all examples\n max_idx = np.argmax(np.array([result[0] for result in results]))\n\n prediction = doc[\"entities\"][max_idx]\n gold_label_set = doc[\"answers\"]\n f1 = metric_max_over_ground_truths(\n squad_metrics.compute_f1, prediction, gold_label_set\n )\n em = metric_max_over_ground_truths(\n squad_metrics.compute_exact, prediction, gold_label_set\n )\n\n return {\n \"f1\": f1,\n \"em\": em,\n }\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "f1",
+ "aggregation": "mean"
+ },
+ {
+ "metric": "em",
+ "higher_is_better": true,
+ "aggregation": "mean"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "record": 1.0
+ },
+ "n-shot": {
+ "record": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=RWKV/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 32
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "71d574c"
+ }
lm-eval-output/RWKV/rwkv-5-world-1b5/record/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c28422d97169c12f4849b52f62cb600ad1f5b77e5b7caace1a43029e579f67f
+ size 69615
lm-eval-output/RWKV/rwkv-5-world-1b5/sciq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -1,10 +1,10 @@
{
"results": {
"sciq": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.898,
+ "acc_stderr,none": 0.009575368801653928,
"acc_norm,none": 0.853,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.011203415395160335,
"alias": "sciq"
}
},
@@ -61,5 +61,5 @@
"bootstrap_iters": 100000,
"gen_kwargs": null
},
- "git_hash": "
+ "git_hash": "71d574c"
}
lm-eval-output/RWKV/rwkv-5-world-1b5/sciq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:cd21075bfc267263e571741b2d802c1c80a968e65a4dfbaef27068a6bcfe1942
+ size 48581
lm-eval-output/RWKV/rwkv-5-world-1b5/triviaqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
ADDED
@@ -0,0 +1,79 @@
+ {
+ "results": {
+ "triviaqa": {
+ "exact_match,remove_whitespace": 0.006966116807846634,
+ "exact_match_stderr,remove_whitespace": 0.0006209117540907837,
+ "alias": "triviaqa"
+ }
+ },
+ "configs": {
+ "triviaqa": {
+ "task": "triviaqa",
+ "dataset_path": "trivia_qa",
+ "dataset_name": "rc.nocontext",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "Question: {{question}}?\nAnswer:",
+ "doc_to_target": "{{answer.aliases}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "exact_match",
+ "aggregation": "mean",
+ "higher_is_better": true,
+ "ignore_case": true,
+ "ignore_punctuation": true
+ }
+ ],
+ "output_type": "generate_until",
+ "generation_kwargs": {
+ "until": [
+ "\n",
+ ".",
+ ","
+ ],
+ "do_sample": false,
+ "temperature": 0.0
+ },
+ "repeats": 1,
+ "filter_list": [
+ {
+ "name": "remove_whitespace",
+ "filter": [
+ {
+ "function": "remove_whitespace"
+ },
+ {
+ "function": "take_first"
+ }
+ ]
+ }
+ ],
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "question",
+ "metadata": {
+ "version": 3.0
+ }
+ }
+ },
+ "versions": {
+ "triviaqa": 3.0
+ },
+ "n-shot": {
+ "triviaqa": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=RWKV/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "71d574c"
+ }
lm-eval-output/RWKV/rwkv-5-world-1b5/triviaqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93cac118cda9133183c123723a7e2c06bf76628ee6b7631fd6feacae87c39d42
+ size 364245
lm-eval-output/RWKV/rwkv-5-world-1b5/truthfulqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -1,100 +1,100 @@
{
"results": {
"truthfulqa": {
- "acc,none": 0.
- "acc_stderr,none": 0.
- "bleu_max,none": 22.
- "bleu_max_stderr,none": 0.
- "bleu_acc,none": 0.
- "bleu_acc_stderr,none": 0.
- "bleu_diff,none": -5.
- "bleu_diff_stderr,none": 0.
- "rouge1_max,none": 46.
- "rouge1_max_stderr,none": 0.
- "rouge1_acc,none": 0.
- "rouge1_acc_stderr,none": 0.
- "rouge1_diff,none": -7.
- "rouge1_diff_stderr,none": 0.
- "rouge2_max,none": 29.
- "rouge2_max_stderr,none": 0.
- "rouge2_acc,none": 0.
- "rouge2_acc_stderr,none": 0.
- "rouge2_diff,none": -9.
- "rouge2_diff_stderr,none": 0.
- "rougeL_max,none": 43.
- "rougeL_max_stderr,none": 0.
- "rougeL_acc,none": 0.
- "rougeL_acc_stderr,none": 0.
- "rougeL_diff,none": -7.
- "rougeL_diff_stderr,none": 0.
+ "acc,none": 0.3063158582660005,
+ "acc_stderr,none": 0.001848093057966781,
+ "bleu_max,none": 22.38730675339667,
+ "bleu_max_stderr,none": 0.7389238198401803,
+ "bleu_acc,none": 0.32558139534883723,
+ "bleu_acc_stderr,none": 0.016403989469907832,
+ "bleu_diff,none": -5.0840030736208695,
+ "bleu_diff_stderr,none": 0.7436958751836921,
+ "rouge1_max,none": 46.22824669996849,
+ "rouge1_max_stderr,none": 0.8675449648324991,
+ "rouge1_acc,none": 0.30354957160342716,
+ "rouge1_acc_stderr,none": 0.016095884155386847,
+ "rouge1_diff,none": -7.423724096321315,
+ "rouge1_diff_stderr,none": 0.8671448522471581,
+ "rouge2_max,none": 29.615032643590446,
+ "rouge2_max_stderr,none": 0.9692284831936309,
+ "rouge2_acc,none": 0.24112607099143207,
+ "rouge2_acc_stderr,none": 0.014974827279752334,
+ "rouge2_diff,none": -9.046616928113313,
+ "rouge2_diff_stderr,none": 0.9974339348311689,
+ "rougeL_max,none": 43.55679318056222,
+ "rougeL_max_stderr,none": 0.8780703189940147,
+ "rougeL_acc,none": 0.29008567931456547,
+ "rougeL_acc_stderr,none": 0.01588623687420952,
+ "rougeL_diff,none": -7.582058247484365,
+ "rougeL_diff_stderr,none": 0.8696895825292742,
"alias": "truthfulqa"
},
"truthfulqa_gen": {
- "bleu_max,none": 22.
- "bleu_max_stderr,none": 0.
- "bleu_acc,none": 0.
- "bleu_acc_stderr,none": 0.
- "bleu_diff,none": -5.
- "bleu_diff_stderr,none": 0.
- "rouge1_max,none": 46.
- "rouge1_max_stderr,none": 0.
- "rouge1_acc,none": 0.
- "rouge1_acc_stderr,none": 0.
- "rouge1_diff,none": -7.
- "rouge1_diff_stderr,none": 0.
- "rouge2_max,none": 29.
- "rouge2_max_stderr,none": 0.
- "rouge2_acc,none": 0.
- "rouge2_acc_stderr,none": 0.
- "rouge2_diff,none": -9.
- "rouge2_diff_stderr,none": 0.
- "rougeL_max,none": 43.
- "rougeL_max_stderr,none": 0.
- "rougeL_acc,none": 0.
- "rougeL_acc_stderr,none": 0.
- "rougeL_diff,none": -7.
- "rougeL_diff_stderr,none": 0.
+ "bleu_max,none": 22.38730675339667,
+ "bleu_max_stderr,none": 0.7389238198401803,
+ "bleu_acc,none": 0.32558139534883723,
+ "bleu_acc_stderr,none": 0.016403989469907832,
+ "bleu_diff,none": -5.0840030736208695,
+ "bleu_diff_stderr,none": 0.7436958751836921,
+ "rouge1_max,none": 46.22824669996849,
+ "rouge1_max_stderr,none": 0.8675449648324991,
+ "rouge1_acc,none": 0.30354957160342716,
+ "rouge1_acc_stderr,none": 0.016095884155386847,
+ "rouge1_diff,none": -7.423724096321315,
+ "rouge1_diff_stderr,none": 0.8671448522471581,
+ "rouge2_max,none": 29.615032643590446,
+ "rouge2_max_stderr,none": 0.9692284831936309,
+ "rouge2_acc,none": 0.24112607099143207,
+ "rouge2_acc_stderr,none": 0.014974827279752334,
+ "rouge2_diff,none": -9.046616928113313,
+ "rouge2_diff_stderr,none": 0.9974339348311689,
+ "rougeL_max,none": 43.55679318056222,
+ "rougeL_max_stderr,none": 0.8780703189940147,
+ "rougeL_acc,none": 0.29008567931456547,
+ "rougeL_acc_stderr,none": 0.01588623687420952,
+ "rougeL_diff,none": -7.582058247484365,
+ "rougeL_diff_stderr,none": 0.8696895825292742,
"alias": " - truthfulqa_gen"
},
"truthfulqa_mc1": {
"acc,none": 0.2252141982864137,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.014623240768023505,
"alias": " - truthfulqa_mc1"
},
"truthfulqa_mc2": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.38741751824558723,
+ "acc_stderr,none": 0.013850697506151954,
"alias": " - truthfulqa_mc2"
}
},
"groups": {
"truthfulqa": {
- "acc,none": 0.
- "acc_stderr,none": 0.
- "bleu_max,none": 22.
- "bleu_max_stderr,none": 0.
- "bleu_acc,none": 0.
- "bleu_acc_stderr,none": 0.
- "bleu_diff,none": -5.
- "bleu_diff_stderr,none": 0.
- "rouge1_max,none": 46.
- "rouge1_max_stderr,none": 0.
- "rouge1_acc,none": 0.
- "rouge1_acc_stderr,none": 0.
- "rouge1_diff,none": -7.
- "rouge1_diff_stderr,none": 0.
- "rouge2_max,none": 29.
- "rouge2_max_stderr,none": 0.
- "rouge2_acc,none": 0.
- "rouge2_acc_stderr,none": 0.
- "rouge2_diff,none": -9.
- "rouge2_diff_stderr,none": 0.
- "rougeL_max,none": 43.
- "rougeL_max_stderr,none": 0.
- "rougeL_acc,none": 0.
- "rougeL_acc_stderr,none": 0.
- "rougeL_diff,none": -7.
- "rougeL_diff_stderr,none": 0.
+ "acc,none": 0.3063158582660005,
+ "acc_stderr,none": 0.001848093057966781,
+ "bleu_max,none": 22.38730675339667,
+ "bleu_max_stderr,none": 0.7389238198401803,
+ "bleu_acc,none": 0.32558139534883723,
+ "bleu_acc_stderr,none": 0.016403989469907832,
+ "bleu_diff,none": -5.0840030736208695,
+ "bleu_diff_stderr,none": 0.7436958751836921,
+ "rouge1_max,none": 46.22824669996849,
+ "rouge1_max_stderr,none": 0.8675449648324991,
+ "rouge1_acc,none": 0.30354957160342716,
+ "rouge1_acc_stderr,none": 0.016095884155386847,
+ "rouge1_diff,none": -7.423724096321315,
+ "rouge1_diff_stderr,none": 0.8671448522471581,
+ "rouge2_max,none": 29.615032643590446,
+ "rouge2_max_stderr,none": 0.9692284831936309,
+ "rouge2_acc,none": 0.24112607099143207,
+ "rouge2_acc_stderr,none": 0.014974827279752334,
+ "rouge2_diff,none": -9.046616928113313,
+ "rouge2_diff_stderr,none": 0.9974339348311689,
+ "rougeL_max,none": 43.55679318056222,
+ "rougeL_max_stderr,none": 0.8780703189940147,
+ "rougeL_acc,none": 0.29008567931456547,
+ "rougeL_acc_stderr,none": 0.01588623687420952,
+ "rougeL_diff,none": -7.582058247484365,
+ "rougeL_diff_stderr,none": 0.8696895825292742,
"alias": "truthfulqa"
}
},
@@ -278,5 +278,5 @@
"bootstrap_iters": 100000,
"gen_kwargs": null
},
- "git_hash": "
+ "git_hash": "71d574c"
}
lm-eval-output/RWKV/rwkv-5-world-1b5/truthfulqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:979200d4620c6464a9d9c5e4d52723a091e85ec4f174c2b7a7923e0fda36d562
+ size 593103
lm-eval-output/RWKV/rwkv-5-world-1b5/winogrande/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -1,8 +1,8 @@
{
"results": {
"winogrande": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.5927387529597474,
+ "acc_stderr,none": 0.013808654122417831,
"alias": "winogrande"
}
},
@@ -54,5 +54,5 @@
"bootstrap_iters": 100000,
"gen_kwargs": null
},
- "git_hash": "
+ "git_hash": "71d574c"
}
lm-eval-output/RWKV/rwkv-5-world-1b5/winogrande/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:5906664c75f7af3cc8ffb59a273e3ad873e8b1d6157bbcb7b069b77c20867138
+ size 28941
lm-eval-output/RWKV/rwkv-5-world-1b5/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -1,70 +1,70 @@
{
"results": {
"xcopa": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.5796363636363637,
+ "acc_stderr,none": 0.04497802716666114,
"alias": "xcopa"
},
"xcopa_et": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.564,
+ "acc_stderr,none": 0.022198954641476802,
"alias": " - xcopa_et"
},
"xcopa_ht": {
"acc,none": 0.508,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.022380208834928028,
"alias": " - xcopa_ht"
},
"xcopa_id": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.638,
+ "acc_stderr,none": 0.021513662527582404,
"alias": " - xcopa_id"
},
"xcopa_it": {
"acc,none": 0.638,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.021513662527582404,
"alias": " - xcopa_it"
},
"xcopa_qu": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.522,
+ "acc_stderr,none": 0.02236139673920786,
"alias": " - xcopa_qu"
},
"xcopa_sw": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.564,
+ "acc_stderr,none": 0.022198954641476802,
"alias": " - xcopa_sw"
},
"xcopa_ta": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.542,
+ "acc_stderr,none": 0.022303966774269948,
"alias": " - xcopa_ta"
},
"xcopa_th": {
"acc,none": 0.566,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.022187215803029004,
"alias": " - xcopa_th"
},
"xcopa_tr": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.562,
+ "acc_stderr,none": 0.022210326363977417,
"alias": " - xcopa_tr"
},
"xcopa_vi": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.618,
+ "acc_stderr,none": 0.02175082059125084,
"alias": " - xcopa_vi"
},
"xcopa_zh": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.654,
+ "acc_stderr,none": 0.021294951277234634,
"alias": " - xcopa_zh"
}
},
"groups": {
"xcopa": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.5796363636363637,
+ "acc_stderr,none": 0.04497802716666114,
"alias": "xcopa"
}
},
@@ -76,7 +76,7 @@
"dataset_name": "et",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a9aac7c0>, connector={'cause': 'sest', 'effect': 'seetõttu'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -101,7 +101,7 @@
"dataset_name": "ht",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a9bcbce0>, connector={'cause': 'poukisa', 'effect': 'donk sa'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -126,7 +126,7 @@
"dataset_name": "id",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a9bb98a0>, connector={'cause': 'karena', 'effect': 'maka'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -151,7 +151,7 @@
"dataset_name": "it",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a9bb8900>, connector={'cause': 'perché', 'effect': 'quindi'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -176,7 +176,7 @@
"dataset_name": "qu",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a9bb8680>, connector={'cause': 'imataq', 'effect': 'chaymi'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -201,7 +201,7 @@
"dataset_name": "sw",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a9bb8d60>, connector={'cause': 'kwa sababu', 'effect': 'kwa hiyo'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -226,7 +226,7 @@
"dataset_name": "ta",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a9bbb880>, connector={'cause': 'காரணமாக', 'effect': 'எனவே'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -251,7 +251,7 @@
"dataset_name": "th",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a99ce520>, connector={'cause': 'เพราะ', 'effect': 'ดังนั้น'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -276,7 +276,7 @@
"dataset_name": "tr",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a99cf240>, connector={'cause': 'çünkü', 'effect': 'bu yüzden'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -301,7 +301,7 @@
"dataset_name": "vi",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a98d58a0>, connector={'cause': 'bởi vì', 'effect': 'vì vậy'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -326,7 +326,7 @@
"dataset_name": "zh",
"validation_split": "validation",
"test_split": "test",
- "doc_to_text": "functools.partial(<function doc_to_text at
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7fb2a98d7c40>, connector={'cause': '因为', 'effect': '所以'})",
"doc_to_target": "label",
"doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
"description": "",
@@ -386,5 +386,5 @@
"bootstrap_iters": 100000,
"gen_kwargs": null
},
- "git_hash": "
+ "git_hash": "71d574c"
}
lm-eval-output/RWKV/rwkv-5-world-1b5/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:c7f42b8489db204c81bae33d867562ef4e4d90c997aac253f22f15228a8de575
+ size 48737
lm-eval-output/RWKV/rwkv-5-world-1b5/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -1,90 +1,90 @@
{
"results": {
"xnli": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.40441767068273093,
+ "acc_stderr,none": 0.04637114849547773,
"alias": "xnli"
},
"xnli_ar": {
"acc,none": 0.3345381526104418,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.009457404390939167,
"alias": " - xnli_ar"
},
"xnli_bg": {
"acc,none": 0.42610441767068274,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.00991201637745909,
"alias": " - xnli_bg"
},
"xnli_de": {
"acc,none": 0.44859437751004017,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.009968964736894265,
"alias": " - xnli_de"
},
"xnli_el": {
"acc,none": 0.37349397590361444,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.009695985962219761,
"alias": " - xnli_el"
},
"xnli_en": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.510441767068273,
+ "acc_stderr,none": 0.010019887205677435,
"alias": " - xnli_en"
},
"xnli_es": {
"acc,none": 0.4566265060240964,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.009984293410840311,
"alias": " - xnli_es"
},
"xnli_fr": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.4566265060240964,
+ "acc_stderr,none": 0.00998429341084031,
"alias": " - xnli_fr"
},
"xnli_hi": {
"acc,none": 0.3682730923694779,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.00966801317899845,
"alias": " - xnli_hi"
},
"xnli_ru": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.44899598393574297,
+ "acc_stderr,none": 0.00996979347724083,
"alias": " - xnli_ru"
},
"xnli_sw": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.3353413654618474,
+ "acc_stderr,none": 0.00946303489151269,
"alias": " - xnli_sw"
},
"xnli_th": {
"acc,none": 0.38473895582329315,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.009752149307152517,
"alias": " - xnli_th"
},
"xnli_tr": {
"acc,none": 0.39799196787148594,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.00981128402642559,
"alias": " - xnli_tr"
},
"xnli_ur": {
"acc,none": 0.3506024096385542,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.009564237156206096,
"alias": " - xnli_ur"
},
"xnli_vi": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.43132530120481927,
+ "acc_stderr,none": 0.009927090290379257,
"alias": " - xnli_vi"
},
"xnli_zh": {
"acc,none": 0.342570281124498,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.009512333319470365,
"alias": " - xnli_zh"
}
},
"groups": {
"xnli": {
- "acc,none": 0.
- "acc_stderr,none": 0.
+ "acc,none": 0.40441767068273093,
+ "acc_stderr,none": 0.04637114849547773,
"alias": "xnli"
}
},
@@ -544,5 +544,5 @@
"bootstrap_iters": 100000,
"gen_kwargs": null
},
- "git_hash": "
+ "git_hash": "71d574c"
}
lm-eval-output/RWKV/rwkv-5-world-1b5/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:b0b1c57ccd05d567bb6e8b13849addd48d56c13d9af44e96515635ad161cd7dd
+ size 55729
lm-eval-output/RWKV/rwkv-5-world-1b5/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json
CHANGED
@@ -2,17 +2,17 @@
"results": {
"xstorycloze": {
"acc,none": 0.5785452138860477,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.046882211406773226,
"alias": "xstorycloze"
},
"xstorycloze_ar": {
"acc,none": 0.5373924553275976,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.012831093347016556,
"alias": " - xstorycloze_ar"
},
"xstorycloze_en": {
"acc,none": 0.7200529450694904,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.011553982180012723,
"alias": " - xstorycloze_en"
},
"xstorycloze_es": {
@@ -27,44 +27,44 @@
},
"xstorycloze_hi": {
"acc,none": 0.5407015221707479,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.012824422739625585,
"alias": " - xstorycloze_hi"
},
"xstorycloze_id": {
"acc,none": 0.614824619457313,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.012523231571141184,
"alias": " - xstorycloze_id"
},
"xstorycloze_my": {
"acc,none": 0.49172733289212445,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.012865364020375396,
"alias": " - xstorycloze_my"
},
"xstorycloze_ru": {
"acc,none": 0.6207809397749835,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.012486070771171334,
"alias": " - xstorycloze_ru"
},
"xstorycloze_sw": {
"acc,none": 0.5115817339510258,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.012863672949335879,
"alias": " - xstorycloze_sw"
},
"xstorycloze_te": {
"acc,none": 0.5691594970218399,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.012743443034698407,
"alias": " - xstorycloze_te"
},
"xstorycloze_zh": {
"acc,none": 0.5949702183984117,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.01263288721875138,
"alias": " - xstorycloze_zh"
}
},
"groups": {
"xstorycloze": {
"acc,none": 0.5785452138860477,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.046882211406773226,
"alias": "xstorycloze"
}
},
@@ -419,5 +419,5 @@
"bootstrap_iters": 100000,
"gen_kwargs": null
},
- "git_hash": "
+ "git_hash": "71d574c"
}