choco9966 committed
Commit: 70a679f
Parent: 2c5773a

add backup results
This view is limited to 50 files because the commit contains too many changes.
- 01-ai/Yi-1.5-9B-32K/result_2024-07-04 10:40:00.json +444 -0
- 01-ai/Yi-1.5-9B-Chat-16K/result_2024-05-20 16:53:58.json +444 -0
- 01-ai/Yi-1.5-9B-Chat/result_2024-05-13 18:14:51.json +444 -0
- 01-ai/Yi-1.5-9B/result_2024-05-13 18:14:57.json +444 -0
- 01-ai/Yi-6B-Chat/result_2023-12-27 01:03:40.json +444 -0
- 01-ai/Yi-6B/result_2023-12-27 01:03:17.json +444 -0
- 01-ai/Yi-9B-200K/result_2024-07-08 15:13:14.json +444 -0
- 01-ai/Yi-9B/result_2024-05-16 07:03:08.json +444 -0
- 12thD/I-SOLAR-10.7B-dpo-sft-v0.1/result_2024-04-09 05:16:32.json +444 -0
- 12thD/ko-Llama-3-8B-sft-v0.1/result_2024-04-22 02:51:25.json +444 -0
- 12thD/ko-Llama-3-8B-sft-v0.3/result_2024-05-02 01:25:36.json +444 -0
- 12thD/ko-gemma-7b-sft-v1.5/result_2024-04-03 05:50:30.json +444 -0
- 42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json +444 -0
- 42MARU/GenAI-llama2-ko-en-dpo-13b-test3/result_2023-11-30 08:08:14.json +444 -0
- 42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json +444 -0
- 42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json +444 -0
- 42MARU/GenAI-llama2-ko-en-instruct-20B-v1/result_2023-11-05 16:07:53.json +444 -0
- 42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json +444 -0
- 42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-02 01:15:35.json +444 -0
- 42MARU/GenAI-llama2-ko-en-instruct-v3-13b/result_2023-11-01 18:54:40.json +444 -0
- 42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json +444 -0
- 42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json +444 -0
- 42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json +444 -0
- 42MARU/GenAI-llama2-ko-en-instruct-v7-13B/result_2023-12-03 17:19:11.json +444 -0
- 42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json +444 -0
- 42MARU/GenAI-llama2-ko-en-platypus-13B/result_2023-10-29 00:31:00.json +444 -0
- 42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json +444 -0
- 42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json +444 -0
- 42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json +444 -0
- 42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json +444 -0
- 42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json +444 -0
- 42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json +444 -0
- 4n3mone/KoSOLAR_merge_test_v0.1/result_2024-02-21 07:42:24.json +444 -0
- 4yo1/llama/result_2024-07-10 12:39:52.json +444 -0
- 4yo1/llama3-eng-ko-8-llama/result_2024-07-02 09:08:08.json +444 -0
- 4yo1/llama3-eng-ko-8/result_2024-07-02 08:52:49.json +444 -0
- 4yo1/llama3-eng-ko-8/result_2024-07-02 08:54:35.json +444 -0
- 4yo1/llama3-eng-ko-8b-sl/result_2024-07-03 13:25:55.json +444 -0
- 4yo1/llama3-eng-ko-8b-sl2/result_2024-07-04 14:21:25.json +444 -0
- 4yo1/llama3-eng-ko-8b-sl3/result_2024-07-08 02:28:51.json +444 -0
- 4yo1/llama3-eng-ko-8b-sl4/result_2024-07-10 12:28:24.json +444 -0
- 4yo1/llama3-eng-ko-8b-sl5/result_2024-07-16 00:48:26.json +444 -0
- 4yo1/llama3-eng-ko-8b-sl6_1/result_2024-07-16 04:26:59.json +444 -0
- 4yo1/llama3-eng-ko-8b/result_2024-06-24 08:56:27.json +444 -0
- 4yo1/llama3-pre1-ds-lora1/result_2024-07-18 01:07:43.json +444 -0
- 4yo1/llama3-pre1-ds-lora2/result_2024-07-19 01:53:13.json +444 -0
- 4yo1/llama3-pre1-ds-lora3/result_2024-07-23 07:30:22.json +444 -0
- 4yo1/llama3-pre1-pre2-ds-ins2-lora3/result_2024-07-31 06:39:29.json +444 -0
- 4yo1/llama3-pre1-pre2-ds-lora3-mergkit-base/result_2024-08-06 05:00:04.json +444 -0
- 4yo1/llama3-pre1-pre2-ds-lora3/result_2024-07-26 21:17:55.json +444 -0
01-ai/Yi-1.5-9B-32K/result_2024-07-04 10:40:00.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.29948805460750855,
            "acc_stderr": 0.013385021637313567,
            "acc_norm": 0.3506825938566553,
            "acc_norm_stderr": 0.013944635930726089
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3333001394144593,
            "acc_stderr": 0.004704293898729902,
            "acc_norm": 0.4137621987651862,
            "acc_norm_stderr": 0.004915003499517831
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47953216374269003,
            "acc_stderr": 0.0383161053282193,
            "acc_norm": 0.47953216374269003,
            "acc_norm_stderr": 0.0383161053282193
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5631067961165048,
            "acc_stderr": 0.049111471073657764,
            "acc_norm": 0.5631067961165048,
            "acc_norm_stderr": 0.049111471073657764
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.47509578544061304,
            "acc_stderr": 0.01785777070490102,
            "acc_norm": 0.47509578544061304,
            "acc_norm_stderr": 0.01785777070490102
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.0391545063041425,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.0391545063041425
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.46808510638297873,
            "acc_stderr": 0.03261936918467382,
            "acc_norm": 0.46808510638297873,
            "acc_norm_stderr": 0.03261936918467382
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.45180722891566266,
            "acc_stderr": 0.03874371556587953,
            "acc_norm": 0.45180722891566266,
            "acc_norm_stderr": 0.03874371556587953
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.47266881028938906,
            "acc_stderr": 0.028355633568328188,
            "acc_norm": 0.47266881028938906,
            "acc_norm_stderr": 0.028355633568328188
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.45739910313901344,
            "acc_stderr": 0.033435777055830646,
            "acc_norm": 0.45739910313901344,
            "acc_norm_stderr": 0.033435777055830646
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5267175572519084,
            "acc_stderr": 0.04379024936553894,
            "acc_norm": 0.5267175572519084,
            "acc_norm_stderr": 0.04379024936553894
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.035402943770953675,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.035402943770953675
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5724137931034483,
            "acc_stderr": 0.04122737111370332,
            "acc_norm": 0.5724137931034483,
            "acc_norm_stderr": 0.04122737111370332
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.04617034827006716,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.04617034827006716
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5,
            "acc_stderr": 0.032478490123081544,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.032478490123081544
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.47692307692307695,
            "acc_stderr": 0.025323990861736125,
            "acc_norm": 0.47692307692307695,
            "acc_norm_stderr": 0.025323990861736125
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5740740740740741,
            "acc_stderr": 0.047803436269367894,
            "acc_norm": 0.5740740740740741,
            "acc_norm_stderr": 0.047803436269367894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4187192118226601,
            "acc_stderr": 0.03471192860518468,
            "acc_norm": 0.4187192118226601,
            "acc_norm_stderr": 0.03471192860518468
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.47419354838709676,
            "acc_stderr": 0.02840609505765332,
            "acc_norm": 0.47419354838709676,
            "acc_norm_stderr": 0.02840609505765332
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6752136752136753,
            "acc_stderr": 0.03067902276549883,
            "acc_norm": 0.6752136752136753,
            "acc_norm_stderr": 0.03067902276549883
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.44150943396226416,
            "acc_stderr": 0.030561590426731833,
            "acc_norm": 0.44150943396226416,
            "acc_norm_stderr": 0.030561590426731833
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4727272727272727,
            "acc_stderr": 0.04782001791380063,
            "acc_norm": 0.4727272727272727,
            "acc_norm_stderr": 0.04782001791380063
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.4185185185185185,
            "acc_stderr": 0.030078013075022066,
            "acc_norm": 0.4185185185185185,
            "acc_norm_stderr": 0.030078013075022066
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943343,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943343
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6069651741293532,
            "acc_stderr": 0.0345368246603156,
            "acc_norm": 0.6069651741293532,
            "acc_norm_stderr": 0.0345368246603156
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4046242774566474,
            "acc_stderr": 0.03742461193887248,
            "acc_norm": 0.4046242774566474,
            "acc_norm_stderr": 0.03742461193887248
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.5476190476190477,
            "acc_stderr": 0.02563425811555495,
            "acc_norm": 0.5476190476190477,
            "acc_norm_stderr": 0.02563425811555495
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3472222222222222,
            "acc_stderr": 0.039812405437178615,
            "acc_norm": 0.3472222222222222,
            "acc_norm_stderr": 0.039812405437178615
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.57,
            "acc_stderr": 0.04975698519562426,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.04975698519562426
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.026918645383239015,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.026918645383239015
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5276073619631901,
            "acc_stderr": 0.03922378290610991,
            "acc_norm": 0.5276073619631901,
            "acc_norm_stderr": 0.03922378290610991
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49691358024691357,
            "acc_stderr": 0.027820214158594377,
            "acc_norm": 0.49691358024691357,
            "acc_norm_stderr": 0.027820214158594377
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.49222797927461137,
            "acc_stderr": 0.03608003225569654,
            "acc_norm": 0.49222797927461137,
            "acc_norm_stderr": 0.03608003225569654
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.41228070175438597,
            "acc_stderr": 0.046306532033665956,
            "acc_norm": 0.41228070175438597,
            "acc_norm_stderr": 0.046306532033665956
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5027522935779817,
            "acc_stderr": 0.02143699835976532,
            "acc_norm": 0.5027522935779817,
            "acc_norm_stderr": 0.02143699835976532
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.40476190476190477,
            "acc_stderr": 0.04390259265377561,
            "acc_norm": 0.40476190476190477,
            "acc_norm_stderr": 0.04390259265377561
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.49019607843137253,
            "acc_stderr": 0.028624412550167958,
            "acc_norm": 0.49019607843137253,
            "acc_norm_stderr": 0.028624412550167958
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7355371900826446,
            "acc_stderr": 0.04026187527591205,
            "acc_norm": 0.7355371900826446,
            "acc_norm_stderr": 0.04026187527591205
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.45394736842105265,
            "acc_stderr": 0.04051646342874142,
            "acc_norm": 0.45394736842105265,
            "acc_norm_stderr": 0.04051646342874142
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39705882352941174,
            "acc_stderr": 0.019794488900024113,
            "acc_norm": 0.39705882352941174,
            "acc_norm_stderr": 0.019794488900024113
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.40070921985815605,
            "acc_stderr": 0.029233465745573086,
            "acc_norm": 0.40070921985815605,
            "acc_norm_stderr": 0.029233465745573086
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.39285714285714285,
            "acc_stderr": 0.04635550135609976,
            "acc_norm": 0.39285714285714285,
            "acc_norm_stderr": 0.04635550135609976
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4675925925925926,
            "acc_stderr": 0.034028015813589656,
            "acc_norm": 0.4675925925925926,
            "acc_norm_stderr": 0.034028015813589656
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.3329608938547486,
            "acc_stderr": 0.015761716178397552,
            "acc_norm": 0.3329608938547486,
            "acc_norm_stderr": 0.015761716178397552
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.76,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.76,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.35294117647058826,
            "acc_stderr": 0.029029422815681404,
            "acc_norm": 0.35294117647058826,
            "acc_norm_stderr": 0.029029422815681404
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6163265306122448,
            "acc_stderr": 0.031130880396235943,
            "acc_norm": 0.6163265306122448,
            "acc_norm_stderr": 0.031130880396235943
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5654008438818565,
            "acc_stderr": 0.03226759995510145,
            "acc_norm": 0.5654008438818565,
            "acc_norm_stderr": 0.03226759995510145
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.36571056062581486,
            "acc_stderr": 0.012301028188840567,
            "acc_norm": 0.36571056062581486,
            "acc_norm_stderr": 0.012301028188840567
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4852941176470588,
            "acc_stderr": 0.03507793834791324,
            "acc_norm": 0.4852941176470588,
            "acc_norm_stderr": 0.03507793834791324
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5151515151515151,
            "acc_stderr": 0.03902551007374448,
            "acc_norm": 0.5151515151515151,
            "acc_norm_stderr": 0.03902551007374448
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2937576499388005,
            "mc1_stderr": 0.015945068581236614,
            "mc2": 0.4670848140389129,
            "mc2_stderr": 0.01585178282587417
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.47107438016528924,
            "acc_stderr": 0.017161563949916348,
            "acc_norm": 0.5171192443919717,
            "acc_norm_stderr": 0.017180275246085626
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "01-ai/Yi-1.5-9B-32K",
        "model_sha": "c0239dbc923b8a2b5ca849763bdd592d39c60850",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
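Each backup file in this commit shares the same lighteval-style layout: per-task metrics under "results" (acc/acc_norm with standard errors, or mc1/mc2 for ko_truthfulqa_mc), task versions under "versions", and run metadata under "config_general". As a minimal sketch of reading one such file (Python standard library only; the macro-averaging rule here is illustrative, not the leaderboard's official aggregation):

import json

# Result files live at <org>/<model>/result_<timestamp>.json, as in the file list above.
path = "01-ai/Yi-1.5-9B-32K/result_2024-07-04 10:40:00.json"

with open(path, encoding="utf-8") as f:
    report = json.load(f)

# Prefer the length-normalized score where present (acc_norm), otherwise acc;
# harness|ko_truthfulqa_mc|0 reports mc1/mc2 instead, so fall back to mc2.
scores = [
    m.get("acc_norm", m.get("acc", m.get("mc2")))
    for m in report["results"].values()
]

name = report["config_general"]["model_name"]
print(f"{name}: {sum(scores) / len(scores):.4f} macro average over {len(scores)} tasks")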
01-ai/Yi-1.5-9B-Chat-16K/result_2024-05-20 16:53:58.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3122866894197952,
            "acc_stderr": 0.013542598541688065,
            "acc_norm": 0.35238907849829354,
            "acc_norm_stderr": 0.013960142600598673
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3403704441346345,
            "acc_stderr": 0.004728653488866913,
            "acc_norm": 0.4166500697072296,
            "acc_norm_stderr": 0.004919962822208309
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.45614035087719296,
            "acc_stderr": 0.03820042586602966,
            "acc_norm": 0.45614035087719296,
            "acc_norm_stderr": 0.03820042586602966
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5242718446601942,
            "acc_stderr": 0.04944901092973779,
            "acc_norm": 0.5242718446601942,
            "acc_norm_stderr": 0.04944901092973779
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.47509578544061304,
            "acc_stderr": 0.01785777070490102,
            "acc_norm": 0.47509578544061304,
            "acc_norm_stderr": 0.01785777070490102
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.03885004245800254,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.03885004245800254
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4297872340425532,
            "acc_stderr": 0.03236214467715564,
            "acc_norm": 0.4297872340425532,
            "acc_norm_stderr": 0.03236214467715564
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4457831325301205,
            "acc_stderr": 0.03869543323472101,
            "acc_norm": 0.4457831325301205,
            "acc_norm_stderr": 0.03869543323472101
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4630225080385852,
            "acc_stderr": 0.02832032583010592,
            "acc_norm": 0.4630225080385852,
            "acc_norm_stderr": 0.02832032583010592
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.49327354260089684,
            "acc_stderr": 0.03355476596234354,
            "acc_norm": 0.49327354260089684,
            "acc_norm_stderr": 0.03355476596234354
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4961832061068702,
            "acc_stderr": 0.04385162325601553,
            "acc_norm": 0.4961832061068702,
            "acc_norm_stderr": 0.04385162325601553
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145631,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145631
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.494949494949495,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.494949494949495,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5241379310344828,
            "acc_stderr": 0.0416180850350153,
            "acc_norm": 0.5241379310344828,
            "acc_norm_stderr": 0.0416180850350153
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.04533838195929776,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.04533838195929776
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4789915966386555,
            "acc_stderr": 0.03244980849990029,
            "acc_norm": 0.4789915966386555,
            "acc_norm_stderr": 0.03244980849990029
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.46153846153846156,
            "acc_stderr": 0.025275892070240634,
            "acc_norm": 0.46153846153846156,
            "acc_norm_stderr": 0.025275892070240634
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.03413963805906235,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.03413963805906235
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45483870967741935,
            "acc_stderr": 0.02832774309156106,
            "acc_norm": 0.45483870967741935,
            "acc_norm_stderr": 0.02832774309156106
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6538461538461539,
            "acc_stderr": 0.0311669573672359,
            "acc_norm": 0.6538461538461539,
            "acc_norm_stderr": 0.0311669573672359
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4226415094339623,
            "acc_stderr": 0.03040233144576954,
            "acc_norm": 0.4226415094339623,
            "acc_norm_stderr": 0.03040233144576954
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.42727272727272725,
            "acc_stderr": 0.04738198703545483,
            "acc_norm": 0.42727272727272725,
            "acc_norm_stderr": 0.04738198703545483
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.4,
            "acc_stderr": 0.02986960509531691,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.02986960509531691
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5522388059701493,
            "acc_stderr": 0.03516184772952167,
            "acc_norm": 0.5522388059701493,
            "acc_norm_stderr": 0.03516184772952167
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.42196531791907516,
            "acc_stderr": 0.037657466938651504,
            "acc_norm": 0.42196531791907516,
            "acc_norm_stderr": 0.037657466938651504
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.544973544973545,
            "acc_stderr": 0.025646928361049398,
            "acc_norm": 0.544973544973545,
            "acc_norm_stderr": 0.025646928361049398
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.03716177437566017,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.03716177437566017
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.65,
            "acc_stderr": 0.04793724854411019,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.04793724854411019
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4797687861271676,
            "acc_stderr": 0.026897049996382875,
            "acc_norm": 0.4797687861271676,
            "acc_norm_stderr": 0.026897049996382875
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44785276073619634,
            "acc_stderr": 0.03906947479456601,
            "acc_norm": 0.44785276073619634,
            "acc_norm_stderr": 0.03906947479456601
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49382716049382713,
            "acc_stderr": 0.027818623962583295,
            "acc_norm": 0.49382716049382713,
            "acc_norm_stderr": 0.027818623962583295
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.45,
            "acc_stderr": 0.049999999999999996,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.049999999999999996
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.47150259067357514,
            "acc_stderr": 0.03602573571288441,
            "acc_norm": 0.47150259067357514,
            "acc_norm_stderr": 0.03602573571288441
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.38596491228070173,
            "acc_stderr": 0.045796394220704355,
            "acc_norm": 0.38596491228070173,
            "acc_norm_stderr": 0.045796394220704355
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.46605504587155966,
            "acc_stderr": 0.02138786335035399,
            "acc_norm": 0.46605504587155966,
            "acc_norm_stderr": 0.02138786335035399
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4603174603174603,
            "acc_stderr": 0.04458029125470973,
            "acc_norm": 0.4603174603174603,
            "acc_norm_stderr": 0.04458029125470973
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.02849199358617157,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.02849199358617157
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.628099173553719,
            "acc_stderr": 0.04412015806624504,
            "acc_norm": 0.628099173553719,
            "acc_norm_stderr": 0.04412015806624504
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.375,
            "acc_stderr": 0.039397364351956274,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.039397364351956274
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39869281045751637,
            "acc_stderr": 0.019808281317449848,
            "acc_norm": 0.39869281045751637,
            "acc_norm_stderr": 0.019808281317449848
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.39361702127659576,
            "acc_stderr": 0.02914454478159615,
            "acc_norm": 0.39361702127659576,
            "acc_norm_stderr": 0.02914454478159615
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4375,
            "acc_stderr": 0.04708567521880525,
            "acc_norm": 0.4375,
            "acc_norm_stderr": 0.04708567521880525
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.03395322726375797,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.03395322726375797
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.28938547486033517,
            "acc_stderr": 0.015166544550490317,
            "acc_norm": 0.28938547486033517,
            "acc_norm_stderr": 0.015166544550490317
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.78,
            "acc_stderr": 0.04163331998932261,
            "acc_norm": 0.78,
            "acc_norm_stderr": 0.04163331998932261
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33088235294117646,
            "acc_stderr": 0.02858270975389844,
            "acc_norm": 0.33088235294117646,
            "acc_norm_stderr": 0.02858270975389844
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5469387755102041,
            "acc_stderr": 0.03186785930004128,
            "acc_norm": 0.5469387755102041,
            "acc_norm_stderr": 0.03186785930004128
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5274261603375527,
            "acc_stderr": 0.03249822718301304,
            "acc_norm": 0.5274261603375527,
            "acc_norm_stderr": 0.03249822718301304
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3428943937418514,
            "acc_stderr": 0.012123463271585892,
            "acc_norm": 0.3428943937418514,
            "acc_norm_stderr": 0.012123463271585892
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.03492406104163614,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.03492406104163614
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.47878787878787876,
            "acc_stderr": 0.03900828913737302,
            "acc_norm": 0.47878787878787876,
            "acc_norm_stderr": 0.03900828913737302
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3219094247246022,
            "mc1_stderr": 0.016355567611960397,
            "mc2": 0.5121087237362004,
            "mc2_stderr": 0.01621113484074564
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4923258559622196,
            "acc_stderr": 0.017188329219654276,
            "acc_norm": 0.5171192443919717,
            "acc_norm_stderr": 0.017180275246085626
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "01-ai/Yi-1.5-9B-Chat-16K",
        "model_sha": "2b397e5f0fab87984efa66856c5c4ed4bbe68b50",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
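Because every backup file follows the same schema, results for different models can be tabulated in one pass. A sketch over the <org>/<model>/result_*.json layout shown in the file list above (the chosen metric and sort order are illustrative):

import glob
import json
import os

# Assumes the <org>/<model>/result_<timestamp>.json layout from this commit's file list.
rows = []
for path in glob.glob(os.path.join("*", "*", "result_*.json")):
    with open(path, encoding="utf-8") as f:
        report = json.load(f)
    arc = report["results"]["harness|ko_arc_challenge|25"]["acc_norm"]
    rows.append((report["config_general"]["model_name"], arc))

# Print models with the highest ko_arc_challenge acc_norm first.
for name, arc in sorted(rows, key=lambda r: r[1], reverse=True):
    print(f"{arc:.4f}  {name}")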
01-ai/Yi-1.5-9B-Chat/result_2024-05-13 18:14:51.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.32081911262798635,
            "acc_stderr": 0.013640943091946528,
            "acc_norm": 0.35580204778157,
            "acc_norm_stderr": 0.01399057113791876
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3337980481975702,
            "acc_stderr": 0.004706048116764947,
            "acc_norm": 0.40579565823541125,
            "acc_norm_stderr": 0.004900417982582044
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.3742690058479532,
            "acc_stderr": 0.03711601185389482,
            "acc_norm": 0.3742690058479532,
            "acc_norm_stderr": 0.03711601185389482
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5339805825242718,
            "acc_stderr": 0.04939291447273481,
            "acc_norm": 0.5339805825242718,
            "acc_norm_stderr": 0.04939291447273481
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.438058748403576,
            "acc_stderr": 0.017742232238257223,
            "acc_norm": 0.438058748403576,
            "acc_norm_stderr": 0.017742232238257223
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.038850042458002526,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.038850042458002526
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.46808510638297873,
            "acc_stderr": 0.03261936918467382,
            "acc_norm": 0.46808510638297873,
            "acc_norm_stderr": 0.03261936918467382
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4036144578313253,
            "acc_stderr": 0.03819486140758397,
            "acc_norm": 0.4036144578313253,
            "acc_norm_stderr": 0.03819486140758397
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4340836012861736,
            "acc_stderr": 0.02815023224453559,
            "acc_norm": 0.4340836012861736,
            "acc_norm_stderr": 0.02815023224453559
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4977578475336323,
            "acc_stderr": 0.033557465352232634,
            "acc_norm": 0.4977578475336323,
            "acc_norm_stderr": 0.033557465352232634
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.42748091603053434,
            "acc_stderr": 0.043389203057924,
            "acc_norm": 0.42748091603053434,
            "acc_norm_stderr": 0.043389203057924
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5151515151515151,
            "acc_stderr": 0.0356071651653106,
            "acc_norm": 0.5151515151515151,
            "acc_norm_stderr": 0.0356071651653106
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5586206896551724,
            "acc_stderr": 0.04137931034482757,
            "acc_norm": 0.5586206896551724,
            "acc_norm_stderr": 0.04137931034482757
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.042801058373643966,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.042801058373643966
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46218487394957986,
            "acc_stderr": 0.032385469487589795,
            "acc_norm": 0.46218487394957986,
            "acc_norm_stderr": 0.032385469487589795
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.49230769230769234,
            "acc_stderr": 0.025348006031534788,
            "acc_norm": 0.49230769230769234,
            "acc_norm_stderr": 0.025348006031534788
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.55,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.04820403072760627,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.04820403072760627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43842364532019706,
            "acc_stderr": 0.03491207857486519,
            "acc_norm": 0.43842364532019706,
            "acc_norm_stderr": 0.03491207857486519
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.41935483870967744,
            "acc_stderr": 0.02807158890109185,
            "acc_norm": 0.41935483870967744,
            "acc_norm_stderr": 0.02807158890109185
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.688034188034188,
            "acc_stderr": 0.030351527323344937,
            "acc_norm": 0.688034188034188,
            "acc_norm_stderr": 0.030351527323344937
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4339622641509434,
            "acc_stderr": 0.03050329201334259,
            "acc_norm": 0.4339622641509434,
            "acc_norm_stderr": 0.03050329201334259
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.42727272727272725,
            "acc_stderr": 0.04738198703545483,
            "acc_norm": 0.42727272727272725,
            "acc_norm_stderr": 0.04738198703545483
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.030401786406101503,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.030401786406101503
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.36423841059602646,
            "acc_stderr": 0.03929111781242742,
            "acc_norm": 0.36423841059602646,
            "acc_norm_stderr": 0.03929111781242742
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5522388059701493,
            "acc_stderr": 0.03516184772952167,
            "acc_norm": 0.5522388059701493,
            "acc_norm_stderr": 0.03516184772952167
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4161849710982659,
            "acc_stderr": 0.03758517775404947,
            "acc_norm": 0.4161849710982659,
            "acc_norm_stderr": 0.03758517775404947
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.5185185185185185,
            "acc_stderr": 0.02573364199183898,
            "acc_norm": 0.5185185185185185,
            "acc_norm_stderr": 0.02573364199183898
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3125,
            "acc_stderr": 0.038760854559127644,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.038760854559127644
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.59,
            "acc_stderr": 0.04943110704237101,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.04943110704237101
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.48554913294797686,
            "acc_stderr": 0.02690784985628254,
            "acc_norm": 0.48554913294797686,
            "acc_norm_stderr": 0.02690784985628254
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.43558282208588955,
            "acc_stderr": 0.038956324641389366,
            "acc_norm": 0.43558282208588955,
            "acc_norm_stderr": 0.038956324641389366
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4567901234567901,
            "acc_stderr": 0.027716661650194038,
            "acc_norm": 0.4567901234567901,
            "acc_norm_stderr": 0.027716661650194038
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.44559585492227977,
            "acc_stderr": 0.03587014986075659,
            "acc_norm": 0.44559585492227977,
            "acc_norm_stderr": 0.03587014986075659
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.41228070175438597,
            "acc_stderr": 0.04630653203366596,
            "acc_norm": 0.41228070175438597,
            "acc_norm_stderr": 0.04630653203366596
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.46422018348623856,
            "acc_stderr": 0.021382364775701906,
            "acc_norm": 0.46422018348623856,
            "acc_norm_stderr": 0.021382364775701906
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4603174603174603,
            "acc_stderr": 0.04458029125470973,
            "acc_norm": 0.4603174603174603,
            "acc_norm_stderr": 0.04458029125470973
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.48366013071895425,
            "acc_stderr": 0.028614624752805413,
            "acc_norm": 0.48366013071895425,
            "acc_norm_stderr": 0.028614624752805413
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.58,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6528925619834711,
            "acc_stderr": 0.04345724570292534,
            "acc_norm": 0.6528925619834711,
            "acc_norm_stderr": 0.04345724570292534
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.45394736842105265,
            "acc_stderr": 0.04051646342874143,
            "acc_norm": 0.45394736842105265,
            "acc_norm_stderr": 0.04051646342874143
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3758169934640523,
            "acc_stderr": 0.019594021136577454,
            "acc_norm": 0.3758169934640523,
            "acc_norm_stderr": 0.019594021136577454
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.4148936170212766,
            "acc_stderr": 0.0293922365846125,
            "acc_norm": 0.4148936170212766,
            "acc_norm_stderr": 0.0293922365846125
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.04697113923010213,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.04697113923010213
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5046296296296297,
            "acc_stderr": 0.03409825519163572,
            "acc_norm": 0.5046296296296297,
            "acc_norm_stderr": 0.03409825519163572
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.28268156424581004,
            "acc_stderr": 0.015060381730018094,
            "acc_norm": 0.28268156424581004,
            "acc_norm_stderr": 0.015060381730018094
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.71,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.71,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.027257202606114944,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.027257202606114944
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5306122448979592,
            "acc_stderr": 0.031949171367580624,
            "acc_norm": 0.5306122448979592,
            "acc_norm_stderr": 0.031949171367580624
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.43037974683544306,
            "acc_stderr": 0.032230171959375976,
            "acc_norm": 0.43037974683544306,
            "acc_norm_stderr": 0.032230171959375976
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3520208604954368,
            "acc_stderr": 0.012198140605353593,
            "acc_norm": 0.3520208604954368,
            "acc_norm_stderr": 0.012198140605353593
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4019607843137255,
            "acc_stderr": 0.034411900234824655,
            "acc_norm": 0.4019607843137255,
            "acc_norm_stderr": 0.034411900234824655
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.42424242424242425,
            "acc_stderr": 0.038592681420702615,
            "acc_norm": 0.42424242424242425,
            "acc_norm_stderr": 0.038592681420702615
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.33047735618115054,
            "mc1_stderr": 0.01646676961369829,
            "mc2": 0.5010992575203865,
            "mc2_stderr": 0.016374030576131873
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.47461629279811096,
            "acc_stderr": 0.01716818720142925,
            "acc_norm": 0.49586776859504134,
            "acc_norm_stderr": 0.01718976703213082
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "01-ai/Yi-1.5-9B-Chat",
|
436 |
+
"model_sha": "6afa72fa85c12128e9716fc189b6fc21fe26da83",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
01-ai/Yi-1.5-9B/result_2024-05-13 18:14:57.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.30887372013651876,
            "acc_stderr": 0.013501770929344003,
            "acc_norm": 0.3506825938566553,
            "acc_norm_stderr": 0.013944635930726085
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.33419637522405893,
            "acc_stderr": 0.004707447244200622,
            "acc_norm": 0.41674965146385184,
            "acc_norm_stderr": 0.004920130733271773
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47953216374269003,
            "acc_stderr": 0.0383161053282193,
            "acc_norm": 0.47953216374269003,
            "acc_norm_stderr": 0.0383161053282193
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5728155339805825,
            "acc_stderr": 0.048979577377811674,
            "acc_norm": 0.5728155339805825,
            "acc_norm_stderr": 0.048979577377811674
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.46998722860791825,
            "acc_stderr": 0.017847723086649118,
            "acc_norm": 0.46998722860791825,
            "acc_norm_stderr": 0.017847723086649118
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.0402477840197711,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.0402477840197711
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4595744680851064,
            "acc_stderr": 0.032579014820998356,
            "acc_norm": 0.4595744680851064,
            "acc_norm_stderr": 0.032579014820998356
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.03828401115079022,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.03828401115079022
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5048231511254019,
            "acc_stderr": 0.028396770444111298,
            "acc_norm": 0.5048231511254019,
            "acc_norm_stderr": 0.028396770444111298
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.43946188340807174,
            "acc_stderr": 0.03331092511038179,
            "acc_norm": 0.43946188340807174,
            "acc_norm_stderr": 0.03331092511038179
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4961832061068702,
            "acc_stderr": 0.043851623256015534,
            "acc_norm": 0.4961832061068702,
            "acc_norm_stderr": 0.043851623256015534
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956913,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956913
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5757575757575758,
            "acc_stderr": 0.03521224908841585,
            "acc_norm": 0.5757575757575758,
            "acc_norm_stderr": 0.03521224908841585
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5793103448275863,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.5793103448275863,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04690650298201942,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04690650298201942
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5294117647058824,
            "acc_stderr": 0.032422250271150053,
            "acc_norm": 0.5294117647058824,
            "acc_norm_stderr": 0.032422250271150053
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4948717948717949,
            "acc_stderr": 0.02534967290683866,
            "acc_norm": 0.4948717948717949,
            "acc_norm_stderr": 0.02534967290683866
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.45320197044334976,
            "acc_stderr": 0.035025446508458714,
            "acc_norm": 0.45320197044334976,
            "acc_norm_stderr": 0.035025446508458714
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.46774193548387094,
            "acc_stderr": 0.02838474778881333,
            "acc_norm": 0.46774193548387094,
            "acc_norm_stderr": 0.02838474778881333
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.688034188034188,
            "acc_stderr": 0.030351527323344948,
            "acc_norm": 0.688034188034188,
            "acc_norm_stderr": 0.030351527323344948
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4641509433962264,
            "acc_stderr": 0.030693675018458003,
            "acc_norm": 0.4641509433962264,
            "acc_norm_stderr": 0.030693675018458003
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4909090909090909,
            "acc_stderr": 0.04788339768702861,
            "acc_norm": 0.4909090909090909,
            "acc_norm_stderr": 0.04788339768702861
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.42592592592592593,
            "acc_stderr": 0.03014913560136594,
            "acc_norm": 0.42592592592592593,
            "acc_norm_stderr": 0.03014913560136594
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33774834437086093,
            "acc_stderr": 0.038615575462551684,
            "acc_norm": 0.33774834437086093,
            "acc_norm_stderr": 0.038615575462551684
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6268656716417911,
            "acc_stderr": 0.034198326081760065,
            "acc_norm": 0.6268656716417911,
            "acc_norm_stderr": 0.034198326081760065
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.36416184971098264,
            "acc_stderr": 0.03669072477416907,
            "acc_norm": 0.36416184971098264,
            "acc_norm_stderr": 0.03669072477416907
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.5317460317460317,
            "acc_stderr": 0.0256993528321318,
            "acc_norm": 0.5317460317460317,
            "acc_norm_stderr": 0.0256993528321318
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2986111111111111,
            "acc_stderr": 0.03827052357950756,
            "acc_norm": 0.2986111111111111,
            "acc_norm_stderr": 0.03827052357950756
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.57,
            "acc_stderr": 0.04975698519562426,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.04975698519562426
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5057803468208093,
            "acc_stderr": 0.026917296179149123,
            "acc_norm": 0.5057803468208093,
            "acc_norm_stderr": 0.026917296179149123
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.49079754601226994,
            "acc_stderr": 0.039277056007874414,
            "acc_norm": 0.49079754601226994,
            "acc_norm_stderr": 0.039277056007874414
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49382716049382713,
            "acc_stderr": 0.027818623962583302,
            "acc_norm": 0.49382716049382713,
            "acc_norm_stderr": 0.027818623962583302
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5129533678756477,
            "acc_stderr": 0.0360722806104775,
            "acc_norm": 0.5129533678756477,
            "acc_norm_stderr": 0.0360722806104775
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.4298245614035088,
            "acc_stderr": 0.046570472605949646,
            "acc_norm": 0.4298245614035088,
            "acc_norm_stderr": 0.046570472605949646
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.4954128440366973,
            "acc_stderr": 0.021436420955529435,
            "acc_norm": 0.4954128440366973,
            "acc_norm_stderr": 0.021436420955529435
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4523809523809524,
            "acc_stderr": 0.044518079590553275,
            "acc_norm": 0.4523809523809524,
            "acc_norm_stderr": 0.044518079590553275
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4869281045751634,
            "acc_stderr": 0.028620130800700246,
            "acc_norm": 0.4869281045751634,
            "acc_norm_stderr": 0.028620130800700246
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.55,
            "acc_stderr": 0.05,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7107438016528925,
            "acc_stderr": 0.04139112727635464,
            "acc_norm": 0.7107438016528925,
            "acc_norm_stderr": 0.04139112727635464
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.48026315789473684,
            "acc_stderr": 0.04065771002562605,
            "acc_norm": 0.48026315789473684,
            "acc_norm_stderr": 0.04065771002562605
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.41830065359477125,
            "acc_stderr": 0.019955975145835546,
            "acc_norm": 0.41830065359477125,
            "acc_norm_stderr": 0.019955975145835546
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.41134751773049644,
            "acc_stderr": 0.02935491115994098,
            "acc_norm": 0.41134751773049644,
            "acc_norm_stderr": 0.02935491115994098
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.375,
            "acc_stderr": 0.04595091388086298,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.04595091388086298
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.03409386946992699,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.03409386946992699
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.33519553072625696,
            "acc_stderr": 0.01578800719018589,
            "acc_norm": 0.33519553072625696,
            "acc_norm_stderr": 0.01578800719018589
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.77,
            "acc_stderr": 0.04229525846816505,
            "acc_norm": 0.77,
            "acc_norm_stderr": 0.04229525846816505
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33088235294117646,
            "acc_stderr": 0.028582709753898445,
            "acc_norm": 0.33088235294117646,
            "acc_norm_stderr": 0.028582709753898445
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6040816326530613,
            "acc_stderr": 0.03130802899065686,
            "acc_norm": 0.6040816326530613,
            "acc_norm_stderr": 0.03130802899065686
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5021097046413502,
            "acc_stderr": 0.032546938018020076,
            "acc_norm": 0.5021097046413502,
            "acc_norm_stderr": 0.032546938018020076
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3539765319426336,
            "acc_stderr": 0.01221350473173165,
            "acc_norm": 0.3539765319426336,
            "acc_norm_stderr": 0.01221350473173165
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.47549019607843135,
            "acc_stderr": 0.035050931943487976,
            "acc_norm": 0.47549019607843135,
            "acc_norm_stderr": 0.035050931943487976
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.49696969696969695,
            "acc_stderr": 0.03904272341431857,
            "acc_norm": 0.49696969696969695,
            "acc_norm_stderr": 0.03904272341431857
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3047735618115055,
            "mc1_stderr": 0.01611412415688246,
            "mc2": 0.48003239735848235,
            "mc2_stderr": 0.015823495770172346
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5407319952774499,
            "acc_stderr": 0.017133218276537666,
            "acc_norm": 0.5619834710743802,
            "acc_norm_stderr": 0.01705775370216029
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "01-ai/Yi-1.5-9B",
        "model_sha": "9a6839c5b9db3dbb245fb98a072bfabc242621f2",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
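Every result file added in this commit shares the same three-part schema: per-task scores under "results", task format versions under "versions", and run metadata under "config_general". As a minimal sketch of working with one of these backups (the local path and the averaging step are illustrative assumptions, not part of this commit), a file can be loaded and summarized like this:

```python
import json

# Hypothetical local path; any result file added in this commit has the same layout.
path = "01-ai/Yi-1.5-9B/result_2024-05-13 18:14:57.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Per-task scores live under "results". The ko_mmlu subtasks report
# acc/acc_norm, while harness|ko_truthfulqa_mc|0 reports mc1/mc2 instead,
# so we filter by task name before averaging.
mmlu_norm = [
    scores["acc_norm"]
    for task, scores in data["results"].items()
    if task.startswith("harness|ko_mmlu")
]

print(data["config_general"]["model_name"])
print(f"mean ko_mmlu acc_norm over {len(mmlu_norm)} subtasks: "
      f"{sum(mmlu_norm) / len(mmlu_norm):.4f}")
```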
01-ai/Yi-6B-Chat/result_2023-12-27 01:03:40.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2235494880546075,
            "acc_stderr": 0.012174896631202614,
            "acc_norm": 0.26621160409556316,
            "acc_norm_stderr": 0.012915774781523216
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.30611431985660226,
            "acc_stderr": 0.004599358920909541,
            "acc_norm": 0.35222067317267475,
            "acc_norm_stderr": 0.004766860907171539
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4619883040935672,
            "acc_stderr": 0.03823727092882307,
            "acc_norm": 0.4619883040935672,
            "acc_norm_stderr": 0.03823727092882307
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4854368932038835,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.4854368932038835,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3537675606641124,
            "acc_stderr": 0.017098184708161906,
            "acc_norm": 0.3537675606641124,
            "acc_norm_stderr": 0.017098184708161906
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.04094376269996794,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.04094376269996794
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3276595744680851,
            "acc_stderr": 0.030683020843231004,
            "acc_norm": 0.3276595744680851,
            "acc_norm_stderr": 0.030683020843231004
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3614457831325301,
            "acc_stderr": 0.03740059382029319,
            "acc_norm": 0.3614457831325301,
            "acc_norm_stderr": 0.03740059382029319
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.43086816720257237,
            "acc_stderr": 0.028125340983972718,
            "acc_norm": 0.43086816720257237,
            "acc_norm_stderr": 0.028125340983972718
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.34977578475336324,
            "acc_stderr": 0.03200736719484504,
            "acc_norm": 0.34977578475336324,
            "acc_norm_stderr": 0.03200736719484504
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.366412213740458,
            "acc_stderr": 0.04225875451969638,
            "acc_norm": 0.366412213740458,
            "acc_norm_stderr": 0.04225875451969638
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.42424242424242425,
            "acc_stderr": 0.035212249088415824,
            "acc_norm": 0.42424242424242425,
            "acc_norm_stderr": 0.035212249088415824
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5172413793103449,
            "acc_stderr": 0.04164188720169375,
            "acc_norm": 0.5172413793103449,
            "acc_norm_stderr": 0.04164188720169375
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.04617034827006718,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.04617034827006718
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46638655462184875,
            "acc_stderr": 0.03240501447690071,
            "acc_norm": 0.46638655462184875,
            "acc_norm_stderr": 0.03240501447690071
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3641025641025641,
            "acc_stderr": 0.024396672985094764,
            "acc_norm": 0.3641025641025641,
            "acc_norm_stderr": 0.024396672985094764
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384739,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384739
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.03430462416103872,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.03430462416103872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.38064516129032255,
            "acc_stderr": 0.027621717832907046,
            "acc_norm": 0.38064516129032255,
            "acc_norm_stderr": 0.027621717832907046
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.594017094017094,
            "acc_stderr": 0.03217180182641086,
            "acc_norm": 0.594017094017094,
            "acc_norm_stderr": 0.03217180182641086
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4037735849056604,
            "acc_stderr": 0.03019761160019795,
            "acc_norm": 0.4037735849056604,
            "acc_norm_stderr": 0.03019761160019795
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.41818181818181815,
            "acc_stderr": 0.0472457740573157,
            "acc_norm": 0.41818181818181815,
            "acc_norm_stderr": 0.0472457740573157
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3074074074074074,
            "acc_stderr": 0.02813325257881563,
            "acc_norm": 0.3074074074074074,
            "acc_norm_stderr": 0.02813325257881563
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.036313298039696545,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.036313298039696545
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4577114427860697,
            "acc_stderr": 0.035228658640995975,
            "acc_norm": 0.4577114427860697,
            "acc_norm_stderr": 0.035228658640995975
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.36416184971098264,
            "acc_stderr": 0.03669072477416907,
            "acc_norm": 0.36416184971098264,
            "acc_norm_stderr": 0.03669072477416907
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3941798941798942,
            "acc_stderr": 0.02516798233389414,
            "acc_norm": 0.3941798941798942,
            "acc_norm_stderr": 0.02516798233389414
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3125,
            "acc_stderr": 0.038760854559127644,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.038760854559127644
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4479768786127168,
            "acc_stderr": 0.026772990653361813,
            "acc_norm": 0.4479768786127168,
            "acc_norm_stderr": 0.026772990653361813
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3496932515337423,
            "acc_stderr": 0.03746668325470022,
            "acc_norm": 0.3496932515337423,
            "acc_norm_stderr": 0.03746668325470022
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.027002521034516475,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.027002521034516475
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384739,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384739
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.43005181347150256,
            "acc_stderr": 0.035729543331448094,
            "acc_norm": 0.43005181347150256,
            "acc_norm_stderr": 0.035729543331448094
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.041857744240220575,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.041857744240220575
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3779816513761468,
            "acc_stderr": 0.02078918706672812,
            "acc_norm": 0.3779816513761468,
            "acc_norm_stderr": 0.02078918706672812
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.48412698412698413,
            "acc_stderr": 0.04469881854072606,
            "acc_norm": 0.48412698412698413,
            "acc_norm_stderr": 0.04469881854072606
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4411764705882353,
            "acc_stderr": 0.028431095444176643,
            "acc_norm": 0.4411764705882353,
            "acc_norm_stderr": 0.028431095444176643
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5867768595041323,
            "acc_stderr": 0.04495087843548408,
            "acc_norm": 0.5867768595041323,
            "acc_norm_stderr": 0.04495087843548408
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4144736842105263,
            "acc_stderr": 0.04008973785779206,
            "acc_norm": 0.4144736842105263,
            "acc_norm_stderr": 0.04008973785779206
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3300653594771242,
            "acc_stderr": 0.019023726160724556,
            "acc_norm": 0.3300653594771242,
            "acc_norm_stderr": 0.019023726160724556
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.36879432624113473,
            "acc_stderr": 0.028782227561347247,
            "acc_norm": 0.36879432624113473,
            "acc_norm_stderr": 0.028782227561347247
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.032757734861009996,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.032757734861009996
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23910614525139665,
            "acc_stderr": 0.014265554192331161,
            "acc_norm": 0.23910614525139665,
            "acc_norm_stderr": 0.014265554192331161
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3161764705882353,
            "acc_stderr": 0.02824568739146291,
            "acc_norm": 0.3161764705882353,
            "acc_norm_stderr": 0.02824568739146291
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5755102040816327,
            "acc_stderr": 0.031642094879429414,
            "acc_norm": 0.5755102040816327,
            "acc_norm_stderr": 0.031642094879429414
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.350210970464135,
            "acc_stderr": 0.03105239193758435,
            "acc_norm": 0.350210970464135,
            "acc_norm_stderr": 0.03105239193758435
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3200782268578879,
            "acc_stderr": 0.011914791947638519,
            "acc_norm": 0.3200782268578879,
            "acc_norm_stderr": 0.011914791947638519
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.39215686274509803,
            "acc_stderr": 0.03426712349247271,
            "acc_norm": 0.39215686274509803,
            "acc_norm_stderr": 0.03426712349247271
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3696969696969697,
            "acc_stderr": 0.03769430314512567,
            "acc_norm": 0.3696969696969697,
            "acc_norm_stderr": 0.03769430314512567
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3072215422276622,
            "mc1_stderr": 0.016150201321323013,
            "mc2": 0.48699251655132686,
            "mc2_stderr": 0.016174272005682996
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.33530106257378983,
            "acc_stderr": 0.016230981232989827,
            "acc_norm": 0.3742621015348288,
            "acc_norm_stderr": 0.016637917789798732
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "01-ai/Yi-6B-Chat",
        "model_sha": "36326f9bc1c8020e0cf29ea830ee5e6679a66a23",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
01-ai/Yi-6B/result_2023-12-27 01:03:17.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2175767918088737,
            "acc_stderr": 0.012057262020972502,
            "acc_norm": 0.26109215017064846,
            "acc_norm_stderr": 0.012835523909473855
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3021310495917148,
            "acc_stderr": 0.00458243310963648,
            "acc_norm": 0.35012945628360886,
            "acc_norm_stderr": 0.004760354191370852
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4678362573099415,
            "acc_stderr": 0.03826882417660368,
            "acc_norm": 0.4678362573099415,
            "acc_norm_stderr": 0.03826882417660368
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4368932038834951,
            "acc_stderr": 0.04911147107365777,
            "acc_norm": 0.4368932038834951,
            "acc_norm_stderr": 0.04911147107365777
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.39080459770114945,
            "acc_stderr": 0.01744836606706253,
            "acc_norm": 0.39080459770114945,
            "acc_norm_stderr": 0.01744836606706253
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.04135176749720386,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.04135176749720386
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.37446808510638296,
            "acc_stderr": 0.031639106653672915,
            "acc_norm": 0.37446808510638296,
            "acc_norm_stderr": 0.031639106653672915
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3614457831325301,
            "acc_stderr": 0.03740059382029319,
            "acc_norm": 0.3614457831325301,
            "acc_norm_stderr": 0.03740059382029319
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.43086816720257237,
            "acc_stderr": 0.028125340983972714,
            "acc_norm": 0.43086816720257237,
            "acc_norm_stderr": 0.028125340983972714
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3183856502242152,
            "acc_stderr": 0.03126580522513713,
            "acc_norm": 0.3183856502242152,
            "acc_norm_stderr": 0.03126580522513713
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3893129770992366,
            "acc_stderr": 0.04276486542814591,
            "acc_norm": 0.3893129770992366,
            "acc_norm_stderr": 0.04276486542814591
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.42424242424242425,
            "acc_stderr": 0.035212249088415824,
            "acc_norm": 0.42424242424242425,
            "acc_norm_stderr": 0.035212249088415824
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4896551724137931,
            "acc_stderr": 0.041657747757287644,
            "acc_norm": 0.4896551724137931,
            "acc_norm_stderr": 0.041657747757287644
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.04533838195929774,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.04533838195929774
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.47058823529411764,
            "acc_stderr": 0.03242225027115007,
            "acc_norm": 0.47058823529411764,
            "acc_norm_stderr": 0.03242225027115007
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.34102564102564104,
            "acc_stderr": 0.02403548967633507,
            "acc_norm": 0.34102564102564104,
            "acc_norm_stderr": 0.02403548967633507
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3842364532019704,
            "acc_stderr": 0.03422398565657551,
            "acc_norm": 0.3842364532019704,
            "acc_norm_stderr": 0.03422398565657551
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.36774193548387096,
            "acc_stderr": 0.027430866579973474,
            "acc_norm": 0.36774193548387096,
            "acc_norm_stderr": 0.027430866579973474
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6410256410256411,
            "acc_stderr": 0.03142616993791923,
            "acc_norm": 0.6410256410256411,
            "acc_norm_stderr": 0.03142616993791923
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3660377358490566,
            "acc_stderr": 0.029647813539365263,
            "acc_norm": 0.3660377358490566,
            "acc_norm_stderr": 0.029647813539365263
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.39090909090909093,
            "acc_stderr": 0.04673752333670237,
            "acc_norm": 0.39090909090909093,
            "acc_norm_stderr": 0.04673752333670237
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2851851851851852,
            "acc_stderr": 0.027528599210340492,
            "acc_norm": 0.2851851851851852,
            "acc_norm_stderr": 0.027528599210340492
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.03710185726119995,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.03710185726119995
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.48756218905472637,
            "acc_stderr": 0.03534439848539579,
            "acc_norm": 0.48756218905472637,
            "acc_norm_stderr": 0.03534439848539579
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3583815028901734,
            "acc_stderr": 0.03656343653353159,
            "acc_norm": 0.3583815028901734,
            "acc_norm_stderr": 0.03656343653353159
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.36772486772486773,
            "acc_stderr": 0.024833839825562424,
            "acc_norm": 0.36772486772486773,
            "acc_norm_stderr": 0.024833839825562424
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3055555555555556,
            "acc_stderr": 0.03852084696008534,
            "acc_norm": 0.3055555555555556,
            "acc_norm_stderr": 0.03852084696008534
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.476878612716763,
            "acc_stderr": 0.026890297881303128,
            "acc_norm": 0.476878612716763,
            "acc_norm_stderr": 0.026890297881303128
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.36809815950920244,
            "acc_stderr": 0.03789213935838396,
            "acc_norm": 0.36809815950920244,
            "acc_norm_stderr": 0.03789213935838396
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4012345679012346,
            "acc_stderr": 0.027272582849839792,
            "acc_norm": 0.4012345679012346,
            "acc_norm_stderr": 0.027272582849839792
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.37823834196891193,
            "acc_stderr": 0.03499807276193338,
            "acc_norm": 0.37823834196891193,
            "acc_norm_stderr": 0.03499807276193338
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436695,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436695
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3669724770642202,
            "acc_stderr": 0.020664675659520536,
            "acc_norm": 0.3669724770642202,
            "acc_norm_stderr": 0.020664675659520536
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.0442626668137991,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.0442626668137991
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4477124183006536,
            "acc_stderr": 0.02847293847803353,
            "acc_norm": 0.4477124183006536,
            "acc_norm_stderr": 0.02847293847803353
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.039889037033362836,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.039889037033362836
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.32189542483660133,
            "acc_stderr": 0.018901015322093092,
            "acc_norm": 0.32189542483660133,
            "acc_norm_stderr": 0.018901015322093092
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3900709219858156,
            "acc_stderr": 0.02909767559946393,
            "acc_norm": 0.3900709219858156,
            "acc_norm_stderr": 0.02909767559946393
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3392857142857143,
            "acc_stderr": 0.04493949068613539,
            "acc_norm": 0.3392857142857143,
            "acc_norm_stderr": 0.04493949068613539
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.39351851851851855,
            "acc_stderr": 0.03331747876370312,
            "acc_norm": 0.39351851851851855,
            "acc_norm_stderr": 0.03331747876370312
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24581005586592178,
            "acc_stderr": 0.014400296429225612,
            "acc_norm": 0.24581005586592178,
            "acc_norm_stderr": 0.014400296429225612
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.35661764705882354,
            "acc_stderr": 0.02909720956841196,
            "acc_norm": 0.35661764705882354,
            "acc_norm_stderr": 0.02909720956841196
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.563265306122449,
            "acc_stderr": 0.031751952375833226,
            "acc_norm": 0.563265306122449,
            "acc_norm_stderr": 0.031751952375833226
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.3459915611814346,
            "acc_stderr": 0.030964810588786713,
            "acc_norm": 0.3459915611814346,
            "acc_norm_stderr": 0.030964810588786713
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3318122555410691,
            "acc_stderr": 0.012026088259897634,
            "acc_norm": 0.3318122555410691,
            "acc_norm_stderr": 0.012026088259897634
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4068627450980392,
            "acc_stderr": 0.03447891136353382,
            "acc_norm": 0.4068627450980392,
            "acc_norm_stderr": 0.03447891136353382
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4303030303030303,
            "acc_stderr": 0.03866225962879077,
            "acc_norm": 0.4303030303030303,
            "acc_norm_stderr": 0.03866225962879077
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2998776009791922,
            "mc1_stderr": 0.016040352966713616,
            "mc2": 0.47599173122840593,
            "mc2_stderr": 0.015773874222919516
        },
        "harness|ko_commongen_v2|2": {
"acc": 0.3305785123966942,
|
365 |
+
"acc_stderr": 0.0161734232988457,
|
366 |
+
"acc_norm": 0.40968122786304606,
|
367 |
+
"acc_norm_stderr": 0.01690756819221947
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "01-ai/Yi-6B",
|
436 |
+
"model_sha": "b881162e08d0fa65011cb53f2c51544e1b623112",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
01-ai/Yi-9B-200K/result_2024-07-08 15:13:14.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.2832764505119454,
+            "acc_stderr": 0.013167478735134576,
+            "acc_norm": 0.3378839590443686,
+            "acc_norm_stderr": 0.013822047922283523
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.3271260705038837,
+            "acc_stderr": 0.004682048906622317,
+            "acc_norm": 0.40290778729336785,
+            "acc_norm_stderr": 0.00489480111989861
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.4444444444444444,
+            "acc_stderr": 0.03811079669833531,
+            "acc_norm": 0.4444444444444444,
+            "acc_norm_stderr": 0.03811079669833531
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.5533980582524272,
+            "acc_stderr": 0.04922424153458935,
+            "acc_norm": 0.5533980582524272,
+            "acc_norm_stderr": 0.04922424153458935
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.48020434227330777,
+            "acc_stderr": 0.017865944827291605,
+            "acc_norm": 0.48020434227330777,
+            "acc_norm_stderr": 0.017865944827291605
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.2740740740740741,
+            "acc_stderr": 0.03853254836552003,
+            "acc_norm": 0.2740740740740741,
+            "acc_norm_stderr": 0.03853254836552003
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.44680851063829785,
+            "acc_stderr": 0.0325005368436584,
+            "acc_norm": 0.44680851063829785,
+            "acc_norm_stderr": 0.0325005368436584
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.43373493975903615,
+            "acc_stderr": 0.03858158940685515,
+            "acc_norm": 0.43373493975903615,
+            "acc_norm_stderr": 0.03858158940685515
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.4887459807073955,
+            "acc_stderr": 0.028390897396863533,
+            "acc_norm": 0.4887459807073955,
+            "acc_norm_stderr": 0.028390897396863533
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.4798206278026906,
+            "acc_stderr": 0.033530461674123,
+            "acc_norm": 0.4798206278026906,
+            "acc_norm_stderr": 0.033530461674123
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.48091603053435117,
+            "acc_stderr": 0.04382094705550989,
+            "acc_norm": 0.48091603053435117,
+            "acc_norm_stderr": 0.04382094705550989
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.43,
+            "acc_stderr": 0.049756985195624284,
+            "acc_norm": 0.43,
+            "acc_norm_stderr": 0.049756985195624284
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.5404040404040404,
+            "acc_stderr": 0.035507024651313425,
+            "acc_norm": 0.5404040404040404,
+            "acc_norm_stderr": 0.035507024651313425
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.5586206896551724,
+            "acc_stderr": 0.04137931034482757,
+            "acc_norm": 0.5586206896551724,
+            "acc_norm_stderr": 0.04137931034482757
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.29411764705882354,
+            "acc_stderr": 0.04533838195929775,
+            "acc_norm": 0.29411764705882354,
+            "acc_norm_stderr": 0.04533838195929775
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.5252100840336135,
+            "acc_stderr": 0.0324371805513741,
+            "acc_norm": 0.5252100840336135,
+            "acc_norm_stderr": 0.0324371805513741
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.4846153846153846,
+            "acc_stderr": 0.025339003010106515,
+            "acc_norm": 0.4846153846153846,
+            "acc_norm_stderr": 0.025339003010106515
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.58,
+            "acc_stderr": 0.04960449637488583,
+            "acc_norm": 0.58,
+            "acc_norm_stderr": 0.04960449637488583
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.39,
+            "acc_stderr": 0.04902071300001975,
+            "acc_norm": 0.39,
+            "acc_norm_stderr": 0.04902071300001975
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.5648148148148148,
+            "acc_stderr": 0.04792898170907061,
+            "acc_norm": 0.5648148148148148,
+            "acc_norm_stderr": 0.04792898170907061
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.43349753694581283,
+            "acc_stderr": 0.034867317274198714,
+            "acc_norm": 0.43349753694581283,
+            "acc_norm_stderr": 0.034867317274198714
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.4838709677419355,
+            "acc_stderr": 0.028429203176724555,
+            "acc_norm": 0.4838709677419355,
+            "acc_norm_stderr": 0.028429203176724555
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.688034188034188,
+            "acc_stderr": 0.030351527323344944,
+            "acc_norm": 0.688034188034188,
+            "acc_norm_stderr": 0.030351527323344944
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.4339622641509434,
+            "acc_stderr": 0.03050329201334259,
+            "acc_norm": 0.4339622641509434,
+            "acc_norm_stderr": 0.03050329201334259
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.4818181818181818,
+            "acc_stderr": 0.04785964010794916,
+            "acc_norm": 0.4818181818181818,
+            "acc_norm_stderr": 0.04785964010794916
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.4111111111111111,
+            "acc_stderr": 0.029999923508706675,
+            "acc_norm": 0.4111111111111111,
+            "acc_norm_stderr": 0.029999923508706675
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.31788079470198677,
+            "acc_stderr": 0.038020397601079024,
+            "acc_norm": 0.31788079470198677,
+            "acc_norm_stderr": 0.038020397601079024
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.582089552238806,
+            "acc_stderr": 0.034875586404620636,
+            "acc_norm": 0.582089552238806,
+            "acc_norm_stderr": 0.034875586404620636
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.4393063583815029,
+            "acc_stderr": 0.037842719328874674,
+            "acc_norm": 0.4393063583815029,
+            "acc_norm_stderr": 0.037842719328874674
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.4973544973544973,
+            "acc_stderr": 0.025750949678130387,
+            "acc_norm": 0.4973544973544973,
+            "acc_norm_stderr": 0.025750949678130387
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.3055555555555556,
+            "acc_stderr": 0.03852084696008534,
+            "acc_norm": 0.3055555555555556,
+            "acc_norm_stderr": 0.03852084696008534
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.39,
+            "acc_stderr": 0.04902071300001974,
+            "acc_norm": 0.39,
+            "acc_norm_stderr": 0.04902071300001974
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.59,
+            "acc_stderr": 0.04943110704237101,
+            "acc_norm": 0.59,
+            "acc_norm_stderr": 0.04943110704237101
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.546242774566474,
+            "acc_stderr": 0.026803720583206167,
+            "acc_norm": 0.546242774566474,
+            "acc_norm_stderr": 0.026803720583206167
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.48466257668711654,
+            "acc_stderr": 0.03926522378708843,
+            "acc_norm": 0.48466257668711654,
+            "acc_norm_stderr": 0.03926522378708843
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.49382716049382713,
+            "acc_stderr": 0.027818623962583302,
+            "acc_norm": 0.49382716049382713,
+            "acc_norm_stderr": 0.027818623962583302
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.43,
+            "acc_stderr": 0.049756985195624284,
+            "acc_norm": 0.43,
+            "acc_norm_stderr": 0.049756985195624284
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.48704663212435234,
+            "acc_stderr": 0.036072280610477486,
+            "acc_norm": 0.48704663212435234,
+            "acc_norm_stderr": 0.036072280610477486
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.3508771929824561,
+            "acc_stderr": 0.04489539350270697,
+            "acc_norm": 0.3508771929824561,
+            "acc_norm_stderr": 0.04489539350270697
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.47889908256880737,
+            "acc_stderr": 0.02141822475426464,
+            "acc_norm": 0.47889908256880737,
+            "acc_norm_stderr": 0.02141822475426464
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.4444444444444444,
+            "acc_stderr": 0.044444444444444495,
+            "acc_norm": 0.4444444444444444,
+            "acc_norm_stderr": 0.044444444444444495
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.46405228758169936,
+            "acc_stderr": 0.028555827516528784,
+            "acc_norm": 0.46405228758169936,
+            "acc_norm_stderr": 0.028555827516528784
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.55,
+            "acc_stderr": 0.05,
+            "acc_norm": 0.55,
+            "acc_norm_stderr": 0.05
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.6859504132231405,
+            "acc_stderr": 0.042369647530410184,
+            "acc_norm": 0.6859504132231405,
+            "acc_norm_stderr": 0.042369647530410184
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.4605263157894737,
+            "acc_stderr": 0.04056242252249034,
+            "acc_norm": 0.4605263157894737,
+            "acc_norm_stderr": 0.04056242252249034
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.41013071895424835,
+            "acc_stderr": 0.019898412717635903,
+            "acc_norm": 0.41013071895424835,
+            "acc_norm_stderr": 0.019898412717635903
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.36524822695035464,
+            "acc_stderr": 0.028723863853281278,
+            "acc_norm": 0.36524822695035464,
+            "acc_norm_stderr": 0.028723863853281278
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.4017857142857143,
+            "acc_stderr": 0.04653333146973646,
+            "acc_norm": 0.4017857142857143,
+            "acc_norm_stderr": 0.04653333146973646
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.4351851851851852,
+            "acc_stderr": 0.033812000056435254,
+            "acc_norm": 0.4351851851851852,
+            "acc_norm_stderr": 0.033812000056435254
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.3139664804469274,
+            "acc_stderr": 0.01552192393352364,
+            "acc_norm": 0.3139664804469274,
+            "acc_norm_stderr": 0.01552192393352364
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.55,
+            "acc_stderr": 0.049999999999999996,
+            "acc_norm": 0.55,
+            "acc_norm_stderr": 0.049999999999999996
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.71,
+            "acc_stderr": 0.045604802157206845,
+            "acc_norm": 0.71,
+            "acc_norm_stderr": 0.045604802157206845
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.3125,
+            "acc_stderr": 0.02815637344037142,
+            "acc_norm": 0.3125,
+            "acc_norm_stderr": 0.02815637344037142
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.5877551020408164,
+            "acc_stderr": 0.0315123604467427,
+            "acc_norm": 0.5877551020408164,
+            "acc_norm_stderr": 0.0315123604467427
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.5654008438818565,
+            "acc_stderr": 0.03226759995510145,
+            "acc_norm": 0.5654008438818565,
+            "acc_norm_stderr": 0.03226759995510145
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.37157757496740546,
+            "acc_stderr": 0.012341828514528289,
+            "acc_norm": 0.37157757496740546,
+            "acc_norm_stderr": 0.012341828514528289
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.44607843137254904,
+            "acc_stderr": 0.03488845451304974,
+            "acc_norm": 0.44607843137254904,
+            "acc_norm_stderr": 0.03488845451304974
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.4666666666666667,
+            "acc_stderr": 0.038956580652718446,
+            "acc_norm": 0.4666666666666667,
+            "acc_norm_stderr": 0.038956580652718446
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.30354957160342716,
+            "mc1_stderr": 0.016095884155386854,
+            "mc2": 0.47423242047841757,
+            "mc2_stderr": 0.015707621226106624
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.4887839433293979,
+            "acc_stderr": 0.017186028469489283,
+            "acc_norm": 0.5171192443919717,
+            "acc_norm_stderr": 0.01718027524608563
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "01-ai/Yi-9B-200K",
+        "model_sha": "0d1bd7c8efcce669cb35edda2106ddc8dbcd7dff",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
01-ai/Yi-9B/result_2024-05-16 07:03:08.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.29180887372013653,
+            "acc_stderr": 0.013284525292403506,
+            "acc_norm": 0.3430034129692833,
+            "acc_norm_stderr": 0.013872423223718169
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.32822146982672773,
+            "acc_stderr": 0.004686062421158143,
+            "acc_norm": 0.4041027683728341,
+            "acc_norm_stderr": 0.004897146690596263
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.5029239766081871,
+            "acc_stderr": 0.03834759370936839,
+            "acc_norm": 0.5029239766081871,
+            "acc_norm_stderr": 0.03834759370936839
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.5728155339805825,
+            "acc_stderr": 0.04897957737781168,
+            "acc_norm": 0.5728155339805825,
+            "acc_norm_stderr": 0.04897957737781168
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.44189016602809705,
+            "acc_stderr": 0.01775880053421441,
+            "acc_norm": 0.44189016602809705,
+            "acc_norm_stderr": 0.01775880053421441
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.26666666666666666,
+            "acc_stderr": 0.03820169914517905,
+            "acc_norm": 0.26666666666666666,
+            "acc_norm_stderr": 0.03820169914517905
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.4297872340425532,
+            "acc_stderr": 0.03236214467715564,
+            "acc_norm": 0.4297872340425532,
+            "acc_norm_stderr": 0.03236214467715564
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.43373493975903615,
+            "acc_stderr": 0.03858158940685515,
+            "acc_norm": 0.43373493975903615,
+            "acc_norm_stderr": 0.03858158940685515
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.4662379421221865,
+            "acc_stderr": 0.028333277109562783,
+            "acc_norm": 0.4662379421221865,
+            "acc_norm_stderr": 0.028333277109562783
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.43946188340807174,
+            "acc_stderr": 0.03331092511038179,
+            "acc_norm": 0.43946188340807174,
+            "acc_norm_stderr": 0.03331092511038179
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.5114503816793893,
+            "acc_stderr": 0.04384140024078016,
+            "acc_norm": 0.5114503816793893,
+            "acc_norm_stderr": 0.04384140024078016
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.49,
+            "acc_stderr": 0.05024183937956913,
+            "acc_norm": 0.49,
+            "acc_norm_stderr": 0.05024183937956913
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.5656565656565656,
+            "acc_stderr": 0.03531505879359183,
+            "acc_norm": 0.5656565656565656,
+            "acc_norm_stderr": 0.03531505879359183
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.5310344827586206,
+            "acc_stderr": 0.04158632762097828,
+            "acc_norm": 0.5310344827586206,
+            "acc_norm_stderr": 0.04158632762097828
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.3137254901960784,
+            "acc_stderr": 0.04617034827006717,
+            "acc_norm": 0.3137254901960784,
+            "acc_norm_stderr": 0.04617034827006717
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.5378151260504201,
+            "acc_stderr": 0.0323854694875898,
+            "acc_norm": 0.5378151260504201,
+            "acc_norm_stderr": 0.0323854694875898
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.4794871794871795,
+            "acc_stderr": 0.025329663163489943,
+            "acc_norm": 0.4794871794871795,
+            "acc_norm_stderr": 0.025329663163489943
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.59,
+            "acc_stderr": 0.04943110704237102,
+            "acc_norm": 0.59,
+            "acc_norm_stderr": 0.04943110704237102
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.27,
+            "acc_stderr": 0.0446196043338474,
+            "acc_norm": 0.27,
+            "acc_norm_stderr": 0.0446196043338474
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.6111111111111112,
+            "acc_stderr": 0.04712821257426769,
+            "acc_norm": 0.6111111111111112,
+            "acc_norm_stderr": 0.04712821257426769
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.42857142857142855,
+            "acc_stderr": 0.03481904844438803,
+            "acc_norm": 0.42857142857142855,
+            "acc_norm_stderr": 0.03481904844438803
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.43870967741935485,
+            "acc_stderr": 0.028229497320317223,
+            "acc_norm": 0.43870967741935485,
+            "acc_norm_stderr": 0.028229497320317223
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.7435897435897436,
+            "acc_stderr": 0.028605953702004243,
+            "acc_norm": 0.7435897435897436,
+            "acc_norm_stderr": 0.028605953702004243
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.4679245283018868,
+            "acc_stderr": 0.030709486992556545,
+            "acc_norm": 0.4679245283018868,
+            "acc_norm_stderr": 0.030709486992556545
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.45454545454545453,
+            "acc_stderr": 0.04769300568972743,
+            "acc_norm": 0.45454545454545453,
+            "acc_norm_stderr": 0.04769300568972743
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.37777777777777777,
+            "acc_stderr": 0.029560707392465715,
+            "acc_norm": 0.37777777777777777,
+            "acc_norm_stderr": 0.029560707392465715
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.2980132450331126,
+            "acc_stderr": 0.037345356767871984,
+            "acc_norm": 0.2980132450331126,
+            "acc_norm_stderr": 0.037345356767871984
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.5621890547263682,
+            "acc_stderr": 0.035080801121998406,
+            "acc_norm": 0.5621890547263682,
+            "acc_norm_stderr": 0.035080801121998406
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.4277456647398844,
+            "acc_stderr": 0.037724468575180276,
+            "acc_norm": 0.4277456647398844,
+            "acc_norm_stderr": 0.037724468575180276
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.49206349206349204,
+            "acc_stderr": 0.025748065871673286,
+            "acc_norm": 0.49206349206349204,
+            "acc_norm_stderr": 0.025748065871673286
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.3263888888888889,
+            "acc_stderr": 0.03921067198982266,
+            "acc_norm": 0.3263888888888889,
+            "acc_norm_stderr": 0.03921067198982266
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.34,
+            "acc_stderr": 0.04760952285695235,
+            "acc_norm": 0.34,
+            "acc_norm_stderr": 0.04760952285695235
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.53,
+            "acc_stderr": 0.050161355804659205,
+            "acc_norm": 0.53,
+            "acc_norm_stderr": 0.050161355804659205
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.47109826589595377,
+            "acc_stderr": 0.02687408588351835,
+            "acc_norm": 0.47109826589595377,
+            "acc_norm_stderr": 0.02687408588351835
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.44785276073619634,
+            "acc_stderr": 0.03906947479456601,
+            "acc_norm": 0.44785276073619634,
+            "acc_norm_stderr": 0.03906947479456601
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.4845679012345679,
+            "acc_stderr": 0.02780749004427621,
+            "acc_norm": 0.4845679012345679,
+            "acc_norm_stderr": 0.02780749004427621
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.0479372485441102,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.0479372485441102
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.47150259067357514,
+            "acc_stderr": 0.03602573571288441,
+            "acc_norm": 0.47150259067357514,
+            "acc_norm_stderr": 0.03602573571288441
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.37719298245614036,
+            "acc_stderr": 0.04559522141958216,
+            "acc_norm": 0.37719298245614036,
+            "acc_norm_stderr": 0.04559522141958216
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.5009174311926605,
+            "acc_stderr": 0.021437287056051215,
+            "acc_norm": 0.5009174311926605,
+            "acc_norm_stderr": 0.021437287056051215
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.4523809523809524,
+            "acc_stderr": 0.044518079590553275,
+            "acc_norm": 0.4523809523809524,
+            "acc_norm_stderr": 0.044518079590553275
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.4869281045751634,
+            "acc_stderr": 0.028620130800700246,
+            "acc_norm": 0.4869281045751634,
+            "acc_norm_stderr": 0.028620130800700246
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.61,
+            "acc_stderr": 0.04902071300001975,
+            "acc_norm": 0.61,
+            "acc_norm_stderr": 0.04902071300001975
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.628099173553719,
+            "acc_stderr": 0.04412015806624504,
+            "acc_norm": 0.628099173553719,
+            "acc_norm_stderr": 0.04412015806624504
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.4144736842105263,
+            "acc_stderr": 0.04008973785779207,
+            "acc_norm": 0.4144736842105263,
+            "acc_norm_stderr": 0.04008973785779207
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.3839869281045752,
+            "acc_stderr": 0.019675808135281532,
+            "acc_norm": 0.3839869281045752,
+            "acc_norm_stderr": 0.019675808135281532
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.3900709219858156,
+            "acc_stderr": 0.029097675599463926,
+            "acc_norm": 0.3900709219858156,
+            "acc_norm_stderr": 0.029097675599463926
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.44642857142857145,
+            "acc_stderr": 0.04718471485219588,
+            "acc_norm": 0.44642857142857145,
+            "acc_norm_stderr": 0.04718471485219588
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.49537037037037035,
+            "acc_stderr": 0.03409825519163572,
+            "acc_norm": 0.49537037037037035,
+            "acc_norm_stderr": 0.03409825519163572
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.2547486033519553,
+            "acc_stderr": 0.014572650383409153,
+            "acc_norm": 0.2547486033519553,
+            "acc_norm_stderr": 0.014572650383409153
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.45,
+            "acc_stderr": 0.05,
+            "acc_norm": 0.45,
+            "acc_norm_stderr": 0.05
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.73,
+            "acc_stderr": 0.044619604333847394,
+            "acc_norm": 0.73,
+            "acc_norm_stderr": 0.044619604333847394
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.35661764705882354,
+            "acc_stderr": 0.029097209568411952,
+            "acc_norm": 0.35661764705882354,
+            "acc_norm_stderr": 0.029097209568411952
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.5673469387755102,
+            "acc_stderr": 0.031717528240626645,
+            "acc_norm": 0.5673469387755102,
+            "acc_norm_stderr": 0.031717528240626645
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.5021097046413502,
+            "acc_stderr": 0.032546938018020076,
+            "acc_norm": 0.5021097046413502,
+            "acc_norm_stderr": 0.032546938018020076
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.33833116036505867,
+            "acc_stderr": 0.01208426562634422,
+            "acc_norm": 0.33833116036505867,
+            "acc_norm_stderr": 0.01208426562634422
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.4117647058823529,
+            "acc_stderr": 0.0345423658538061,
+            "acc_norm": 0.4117647058823529,
+            "acc_norm_stderr": 0.0345423658538061
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.3575757575757576,
+            "acc_stderr": 0.037425970438065864,
+            "acc_norm": 0.3575757575757576,
+            "acc_norm_stderr": 0.037425970438065864
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.29008567931456547,
+            "mc1_stderr": 0.01588623687420952,
+            "mc2": 0.47399915157940936,
+            "mc2_stderr": 0.015777434106257295
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.4805194805194805,
+            "acc_stderr": 0.01717730199234255,
+            "acc_norm": 0.5053128689492326,
+            "acc_norm_stderr": 0.01718938362722971
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "01-ai/Yi-9B",
+        "model_sha": "95b8e272566167182ef1c53657a97d87a4084c9e",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
12thD/I-SOLAR-10.7B-dpo-sft-v0.1/result_2024-04-09 05:16:32.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.6612627986348123,
+            "acc_stderr": 0.01383056892797433,
+            "acc_norm": 0.7209897610921502,
+            "acc_norm_stderr": 0.013106784883601336
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.4347739494124676,
+            "acc_stderr": 0.0049471417973841305,
+            "acc_norm": 0.5764787890858395,
+            "acc_norm_stderr": 0.004931065434173685
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.6783625730994152,
+            "acc_stderr": 0.03582529442573122,
+            "acc_norm": 0.6783625730994152,
+            "acc_norm_stderr": 0.03582529442573122
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.7087378640776699,
+            "acc_stderr": 0.04498676320572924,
+            "acc_norm": 0.7087378640776699,
+            "acc_norm_stderr": 0.04498676320572924
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.6807151979565773,
+            "acc_stderr": 0.016671261749538736,
+            "acc_norm": 0.6807151979565773,
+            "acc_norm_stderr": 0.016671261749538736
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.43703703703703706,
+            "acc_stderr": 0.04284958639753398,
+            "acc_norm": 0.43703703703703706,
+            "acc_norm_stderr": 0.04284958639753398
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.5319148936170213,
+            "acc_stderr": 0.03261936918467381,
+            "acc_norm": 0.5319148936170213,
+            "acc_norm_stderr": 0.03261936918467381
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.4759036144578313,
+            "acc_stderr": 0.03887971849597264,
+            "acc_norm": 0.4759036144578313,
+            "acc_norm_stderr": 0.03887971849597264
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.6334405144694534,
+            "acc_stderr": 0.02736807824397164,
+            "acc_norm": 0.6334405144694534,
+            "acc_norm_stderr": 0.02736807824397164
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.6322869955156951,
+            "acc_stderr": 0.03236198350928275,
+            "acc_norm": 0.6322869955156951,
+            "acc_norm_stderr": 0.03236198350928275
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.5343511450381679,
+            "acc_stderr": 0.04374928560599738,
+            "acc_norm": 0.5343511450381679,
+            "acc_norm_stderr": 0.04374928560599738
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.51,
+            "acc_stderr": 0.050241839379569095,
+            "acc_norm": 0.51,
+            "acc_norm_stderr": 0.050241839379569095
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.7171717171717171,
+            "acc_stderr": 0.032087795587867514,
+            "acc_norm": 0.7171717171717171,
+            "acc_norm_stderr": 0.032087795587867514
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.46206896551724136,
+            "acc_stderr": 0.041546596717075474,
+            "acc_norm": 0.46206896551724136,
+            "acc_norm_stderr": 0.041546596717075474
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.3137254901960784,
+            "acc_stderr": 0.04617034827006717,
+            "acc_norm": 0.3137254901960784,
+            "acc_norm_stderr": 0.04617034827006717
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.6176470588235294,
+            "acc_stderr": 0.031566630992154156,
+            "acc_norm": 0.6176470588235294,
+            "acc_norm_stderr": 0.031566630992154156
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.5846153846153846,
+            "acc_stderr": 0.024985354923102353,
+            "acc_norm": 0.5846153846153846,
+            "acc_norm_stderr": 0.024985354923102353
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.66,
+            "acc_stderr": 0.04760952285695238,
+            "acc_norm": 0.66,
+            "acc_norm_stderr": 0.04760952285695238
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.36,
+            "acc_stderr": 0.04824181513244218,
+            "acc_norm": 0.36,
+            "acc_norm_stderr": 0.04824181513244218
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.6111111111111112,
+            "acc_stderr": 0.04712821257426769,
+            "acc_norm": 0.6111111111111112,
+            "acc_norm_stderr": 0.04712821257426769
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.4187192118226601,
+            "acc_stderr": 0.03471192860518468,
+            "acc_norm": 0.4187192118226601,
+            "acc_norm_stderr": 0.03471192860518468
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.6161290322580645,
+            "acc_stderr": 0.02766618207553963,
+            "acc_norm": 0.6161290322580645,
+            "acc_norm_stderr": 0.02766618207553963
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.8290598290598291,
+            "acc_stderr": 0.024662496845209804,
+            "acc_norm": 0.8290598290598291,
+            "acc_norm_stderr": 0.024662496845209804
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.569811320754717,
+            "acc_stderr": 0.030471445867183235,
+            "acc_norm": 0.569811320754717,
+            "acc_norm_stderr": 0.030471445867183235
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.5727272727272728,
+            "acc_stderr": 0.047381987035454834,
+            "acc_norm": 0.5727272727272728,
+            "acc_norm_stderr": 0.047381987035454834
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.3851851851851852,
+            "acc_stderr": 0.029670906124630886,
+            "acc_norm": 0.3851851851851852,
+            "acc_norm_stderr": 0.029670906124630886
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.36423841059602646,
+            "acc_stderr": 0.03929111781242741,
+            "acc_norm": 0.36423841059602646,
+            "acc_norm_stderr": 0.03929111781242741
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.6467661691542289,
+            "acc_stderr": 0.03379790611796777,
+            "acc_norm": 0.6467661691542289,
+            "acc_norm_stderr": 0.03379790611796777
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.5606936416184971,
+            "acc_stderr": 0.037842719328874674,
+            "acc_norm": 0.5606936416184971,
+            "acc_norm_stderr": 0.037842719328874674
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.37566137566137564,
+            "acc_stderr": 0.02494236893115979,
+            "acc_norm": 0.37566137566137564,
+            "acc_norm_stderr": 0.02494236893115979
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.4861111111111111,
+            "acc_stderr": 0.04179596617581,
+            "acc_norm": 0.4861111111111111,
+            "acc_norm_stderr": 0.04179596617581
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.4,
+            "acc_stderr": 0.049236596391733084,
+            "acc_norm": 0.4,
+            "acc_norm_stderr": 0.049236596391733084
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.72,
+            "acc_stderr": 0.04512608598542126,
+            "acc_norm": 0.72,
+            "acc_norm_stderr": 0.04512608598542126
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.6127167630057804,
+            "acc_stderr": 0.026226158605124655,
+            "acc_norm": 0.6127167630057804,
+            "acc_norm_stderr": 0.026226158605124655
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.5398773006134969,
+            "acc_stderr": 0.03915857291436972,
+            "acc_norm": 0.5398773006134969,
+            "acc_norm_stderr": 0.03915857291436972
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.6296296296296297,
+            "acc_stderr": 0.026869490744815264,
+            "acc_norm": 0.6296296296296297,
+            "acc_norm_stderr": 0.026869490744815264
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.047937248544110196,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.047937248544110196
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.6528497409326425,
+            "acc_stderr": 0.03435696168361356,
+            "acc_norm": 0.6528497409326425,
+            "acc_norm_stderr": 0.03435696168361356
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.39473684210526316,
+            "acc_stderr": 0.04598188057816542,
+            "acc_norm": 0.39473684210526316,
+            "acc_norm_stderr": 0.04598188057816542
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.7027522935779816,
+            "acc_stderr": 0.019595707224643558,
+            "acc_norm": 0.7027522935779816,
+            "acc_norm_stderr": 0.019595707224643558
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.3492063492063492,
+            "acc_stderr": 0.04263906892795132,
+            "acc_norm": 0.3492063492063492,
+            "acc_norm_stderr": 0.04263906892795132
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.5294117647058824,
+            "acc_stderr": 0.028580341065138293,
+            "acc_norm": 0.5294117647058824,
+            "acc_norm_stderr": 0.028580341065138293
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.64,
+            "acc_stderr": 0.04824181513244218,
+            "acc_norm": 0.64,
+            "acc_norm_stderr": 0.04824181513244218
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.6694214876033058,
+            "acc_stderr": 0.04294340845212093,
+            "acc_norm": 0.6694214876033058,
+            "acc_norm_stderr": 0.04294340845212093
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.618421052631579,
+            "acc_stderr": 0.03953173377749194,
+            "acc_norm": 0.618421052631579,
+            "acc_norm_stderr": 0.03953173377749194
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.5326797385620915,
+            "acc_stderr": 0.020184583359102195,
+            "acc_norm": 0.5326797385620915,
+            "acc_norm_stderr": 0.020184583359102195
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.3900709219858156,
+            "acc_stderr": 0.02909767559946393,
+            "acc_norm": 0.3900709219858156,
+            "acc_norm_stderr": 0.02909767559946393
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.4017857142857143,
+            "acc_stderr": 0.04653333146973646,
+            "acc_norm": 0.4017857142857143,
+            "acc_norm_stderr": 0.04653333146973646
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.4444444444444444,
+            "acc_stderr": 0.03388857118502325,
+            "acc_norm": 0.4444444444444444,
+            "acc_norm_stderr": 0.03388857118502325
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.2748603351955307,
+            "acc_stderr": 0.014931316703220517,
+            "acc_norm": 0.2748603351955307,
+            "acc_norm_stderr": 0.014931316703220517
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.45,
+            "acc_stderr": 0.049999999999999996,
+            "acc_norm": 0.45,
+            "acc_norm_stderr": 0.049999999999999996
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.67,
+            "acc_stderr": 0.047258156262526094,
+            "acc_norm": 0.67,
+            "acc_norm_stderr": 0.047258156262526094
|
320 |
+
},
|
321 |
+
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
+
"acc": 0.48161764705882354,
|
323 |
+
"acc_stderr": 0.03035230339535196,
|
324 |
+
"acc_norm": 0.48161764705882354,
|
325 |
+
"acc_norm_stderr": 0.03035230339535196
|
326 |
+
},
|
327 |
+
"harness|ko_mmlu_security_studies|5": {
|
328 |
+
"acc": 0.6040816326530613,
|
329 |
+
"acc_stderr": 0.031308028990656864,
|
330 |
+
"acc_norm": 0.6040816326530613,
|
331 |
+
"acc_norm_stderr": 0.031308028990656864
|
332 |
+
},
|
333 |
+
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
+
"acc": 0.6751054852320675,
|
335 |
+
"acc_stderr": 0.030486039389105296,
|
336 |
+
"acc_norm": 0.6751054852320675,
|
337 |
+
"acc_norm_stderr": 0.030486039389105296
|
338 |
+
},
|
339 |
+
"harness|ko_mmlu_professional_law|5": {
|
340 |
+
"acc": 0.3983050847457627,
|
341 |
+
"acc_stderr": 0.012503310565166235,
|
342 |
+
"acc_norm": 0.3983050847457627,
|
343 |
+
"acc_norm_stderr": 0.012503310565166235
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.5980392156862745,
|
347 |
+
"acc_stderr": 0.03441190023482465,
|
348 |
+
"acc_norm": 0.5980392156862745,
|
349 |
+
"acc_norm_stderr": 0.03441190023482465
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.5333333333333333,
|
353 |
+
"acc_stderr": 0.03895658065271847,
|
354 |
+
"acc_norm": 0.5333333333333333,
|
355 |
+
"acc_norm_stderr": 0.03895658065271847
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.5642594859241126,
|
359 |
+
"mc1_stderr": 0.01735834539886313,
|
360 |
+
"mc2": 0.6711090076900339,
|
361 |
+
"mc2_stderr": 0.014635725108441697
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.45808736717827625,
|
365 |
+
"acc_stderr": 0.017129852117911147,
|
366 |
+
"acc_norm": 0.5159386068476978,
|
367 |
+
"acc_norm_stderr": 0.017181617837190195
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "12thD/I-SOLAR-10.7B-dpo-sft-v0.1",
|
436 |
+
"model_sha": "38206239efc52267202f79250058496d78de4585",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
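Every result file added in this commit shares the schema visible above: a "results" map of per-task metrics (acc/acc_norm or mc1/mc2 with standard errors), a "versions" map, and a "config_general" block with model metadata. As a minimal sketch of how such a file could be consumed (the helper name is illustrative and not part of this dataset; the keys are exactly those shown in the diff), one might load a file and average the per-task acc_norm over the ko_mmlu subjects:

    import json

    def average_acc_norm(path):
        # Load one result file and average acc_norm over the ko_mmlu tasks.
        with open(path, encoding="utf-8") as f:
            data = json.load(f)
        scores = [v["acc_norm"] for k, v in data["results"].items() if "ko_mmlu" in k]
        print(data["config_general"]["model_name"], sum(scores) / len(scores))

    # Illustrative call; any result file from this commit would work.
    average_acc_norm("12thD/ko-Llama-3-8B-sft-v0.1/result_2024-04-22 02:51:25.json")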
12thD/ko-Llama-3-8B-sft-v0.1/result_2024-04-22 02:51:25.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.3703071672354949,
+            "acc_stderr": 0.01411129875167495,
+            "acc_norm": 0.4249146757679181,
+            "acc_norm_stderr": 0.014445698968520769
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.37621987651862177,
+            "acc_stderr": 0.004834461997944866,
+            "acc_norm": 0.498406691894045,
+            "acc_norm_stderr": 0.004989756076956349
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.5497076023391813,
+            "acc_stderr": 0.03815827365913237,
+            "acc_norm": 0.5497076023391813,
+            "acc_norm_stderr": 0.03815827365913237
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.6213592233009708,
+            "acc_stderr": 0.048026946982589726,
+            "acc_norm": 0.6213592233009708,
+            "acc_norm_stderr": 0.048026946982589726
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.47509578544061304,
+            "acc_stderr": 0.01785777070490102,
+            "acc_norm": 0.47509578544061304,
+            "acc_norm_stderr": 0.01785777070490102
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.3851851851851852,
+            "acc_stderr": 0.042039210401562783,
+            "acc_norm": 0.3851851851851852,
+            "acc_norm_stderr": 0.042039210401562783
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.24,
+            "acc_stderr": 0.04292346959909284,
+            "acc_norm": 0.24,
+            "acc_norm_stderr": 0.04292346959909284
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.44680851063829785,
+            "acc_stderr": 0.032500536843658404,
+            "acc_norm": 0.44680851063829785,
+            "acc_norm_stderr": 0.032500536843658404
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.39156626506024095,
+            "acc_stderr": 0.03799857454479637,
+            "acc_norm": 0.39156626506024095,
+            "acc_norm_stderr": 0.03799857454479637
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.5401929260450161,
+            "acc_stderr": 0.028306190403305693,
+            "acc_norm": 0.5401929260450161,
+            "acc_norm_stderr": 0.028306190403305693
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.5112107623318386,
+            "acc_stderr": 0.033549366530984746,
+            "acc_norm": 0.5112107623318386,
+            "acc_norm_stderr": 0.033549366530984746
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.5267175572519084,
+            "acc_stderr": 0.04379024936553894,
+            "acc_norm": 0.5267175572519084,
+            "acc_norm_stderr": 0.04379024936553894
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.48,
+            "acc_stderr": 0.05021167315686779,
+            "acc_norm": 0.48,
+            "acc_norm_stderr": 0.05021167315686779
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.5202020202020202,
+            "acc_stderr": 0.035594435655639196,
+            "acc_norm": 0.5202020202020202,
+            "acc_norm_stderr": 0.035594435655639196
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.5448275862068965,
+            "acc_stderr": 0.04149886942192117,
+            "acc_norm": 0.5448275862068965,
+            "acc_norm_stderr": 0.04149886942192117
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.27450980392156865,
+            "acc_stderr": 0.04440521906179328,
+            "acc_norm": 0.27450980392156865,
+            "acc_norm_stderr": 0.04440521906179328
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.5672268907563025,
+            "acc_stderr": 0.032183581077426124,
+            "acc_norm": 0.5672268907563025,
+            "acc_norm_stderr": 0.032183581077426124
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.5205128205128206,
+            "acc_stderr": 0.02532966316348994,
+            "acc_norm": 0.5205128205128206,
+            "acc_norm_stderr": 0.02532966316348994
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.56,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.56,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.047937248544110196,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.047937248544110196
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.6111111111111112,
+            "acc_stderr": 0.0471282125742677,
+            "acc_norm": 0.6111111111111112,
+            "acc_norm_stderr": 0.0471282125742677
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.4433497536945813,
+            "acc_stderr": 0.03495334582162934,
+            "acc_norm": 0.4433497536945813,
+            "acc_norm_stderr": 0.03495334582162934
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.5032258064516129,
+            "acc_stderr": 0.028443414226438316,
+            "acc_norm": 0.5032258064516129,
+            "acc_norm_stderr": 0.028443414226438316
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.7521367521367521,
+            "acc_stderr": 0.028286324075564407,
+            "acc_norm": 0.7521367521367521,
+            "acc_norm_stderr": 0.028286324075564407
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.5056603773584906,
+            "acc_stderr": 0.03077090076385131,
+            "acc_norm": 0.5056603773584906,
+            "acc_norm_stderr": 0.03077090076385131
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.5363636363636364,
+            "acc_stderr": 0.04776449162396197,
+            "acc_norm": 0.5363636363636364,
+            "acc_norm_stderr": 0.04776449162396197
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.35185185185185186,
+            "acc_stderr": 0.029116617606083015,
+            "acc_norm": 0.35185185185185186,
+            "acc_norm_stderr": 0.029116617606083015
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.40397350993377484,
+            "acc_stderr": 0.0400648568536534,
+            "acc_norm": 0.40397350993377484,
+            "acc_norm_stderr": 0.0400648568536534
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.6517412935323383,
+            "acc_stderr": 0.033687874661154596,
+            "acc_norm": 0.6517412935323383,
+            "acc_norm_stderr": 0.033687874661154596
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.44508670520231214,
+            "acc_stderr": 0.03789401760283648,
+            "acc_norm": 0.44508670520231214,
+            "acc_norm_stderr": 0.03789401760283648
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.35185185185185186,
+            "acc_stderr": 0.02459497512892094,
+            "acc_norm": 0.35185185185185186,
+            "acc_norm_stderr": 0.02459497512892094
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.4791666666666667,
+            "acc_stderr": 0.041775789507399935,
+            "acc_norm": 0.4791666666666667,
+            "acc_norm_stderr": 0.041775789507399935
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.41,
+            "acc_stderr": 0.049431107042371025,
+            "acc_norm": 0.41,
+            "acc_norm_stderr": 0.049431107042371025
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.68,
+            "acc_stderr": 0.04688261722621504,
+            "acc_norm": 0.68,
+            "acc_norm_stderr": 0.04688261722621504
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.5433526011560693,
+            "acc_stderr": 0.026817718130348916,
+            "acc_norm": 0.5433526011560693,
+            "acc_norm_stderr": 0.026817718130348916
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.4601226993865031,
+            "acc_stderr": 0.0391585729143697,
+            "acc_norm": 0.4601226993865031,
+            "acc_norm_stderr": 0.0391585729143697
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.5061728395061729,
+            "acc_stderr": 0.02781862396258329,
+            "acc_norm": 0.5061728395061729,
+            "acc_norm_stderr": 0.02781862396258329
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.33,
+            "acc_stderr": 0.047258156262526045,
+            "acc_norm": 0.33,
+            "acc_norm_stderr": 0.047258156262526045
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.5647668393782384,
+            "acc_stderr": 0.03578038165008585,
+            "acc_norm": 0.5647668393782384,
+            "acc_norm_stderr": 0.03578038165008585
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.32456140350877194,
+            "acc_stderr": 0.04404556157374768,
+            "acc_norm": 0.32456140350877194,
+            "acc_norm_stderr": 0.04404556157374768
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.5706422018348624,
+            "acc_stderr": 0.021222286397236508,
+            "acc_norm": 0.5706422018348624,
+            "acc_norm_stderr": 0.021222286397236508
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.36507936507936506,
+            "acc_stderr": 0.043062412591271526,
+            "acc_norm": 0.36507936507936506,
+            "acc_norm_stderr": 0.043062412591271526
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.49673202614379086,
+            "acc_stderr": 0.02862930519400354,
+            "acc_norm": 0.49673202614379086,
+            "acc_norm_stderr": 0.02862930519400354
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.48,
+            "acc_stderr": 0.050211673156867795,
+            "acc_norm": 0.48,
+            "acc_norm_stderr": 0.050211673156867795
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.6694214876033058,
+            "acc_stderr": 0.04294340845212094,
+            "acc_norm": 0.6694214876033058,
+            "acc_norm_stderr": 0.04294340845212094
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.48026315789473684,
+            "acc_stderr": 0.040657710025626057,
+            "acc_norm": 0.48026315789473684,
+            "acc_norm_stderr": 0.040657710025626057
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.4395424836601307,
+            "acc_stderr": 0.020079420408087925,
+            "acc_norm": 0.4395424836601307,
+            "acc_norm_stderr": 0.020079420408087925
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.32269503546099293,
+            "acc_stderr": 0.02788913930053478,
+            "acc_norm": 0.32269503546099293,
+            "acc_norm_stderr": 0.02788913930053478
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.375,
+            "acc_stderr": 0.04595091388086298,
+            "acc_norm": 0.375,
+            "acc_norm_stderr": 0.04595091388086298
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.4583333333333333,
+            "acc_stderr": 0.033981108902946366,
+            "acc_norm": 0.4583333333333333,
+            "acc_norm_stderr": 0.033981108902946366
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.2346368715083799,
+            "acc_stderr": 0.01417304409830367,
+            "acc_norm": 0.2346368715083799,
+            "acc_norm_stderr": 0.01417304409830367
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.42,
+            "acc_stderr": 0.049604496374885836,
+            "acc_norm": 0.42,
+            "acc_norm_stderr": 0.049604496374885836
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.64,
+            "acc_stderr": 0.04824181513244218,
+            "acc_norm": 0.64,
+            "acc_norm_stderr": 0.04824181513244218
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.40441176470588236,
+            "acc_stderr": 0.029812630701569743,
+            "acc_norm": 0.40441176470588236,
+            "acc_norm_stderr": 0.029812630701569743
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.6244897959183674,
+            "acc_stderr": 0.031001209039894843,
+            "acc_norm": 0.6244897959183674,
+            "acc_norm_stderr": 0.031001209039894843
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.6455696202531646,
+            "acc_stderr": 0.0311373042971858,
+            "acc_norm": 0.6455696202531646,
+            "acc_norm_stderr": 0.0311373042971858
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.35853976531942633,
+            "acc_stderr": 0.012248487319682737,
+            "acc_norm": 0.35853976531942633,
+            "acc_norm_stderr": 0.012248487319682737
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.5686274509803921,
+            "acc_stderr": 0.034760990605016355,
+            "acc_norm": 0.5686274509803921,
+            "acc_norm_stderr": 0.034760990605016355
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.6181818181818182,
+            "acc_stderr": 0.037937131711656344,
+            "acc_norm": 0.6181818181818182,
+            "acc_norm_stderr": 0.037937131711656344
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.3219094247246022,
+            "mc1_stderr": 0.016355567611960383,
+            "mc2": 0.4972168450482467,
+            "mc2_stderr": 0.015567232356568489
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.4805194805194805,
+            "acc_stderr": 0.01717730199234255,
+            "acc_norm": 0.525383707201889,
+            "acc_norm_stderr": 0.017168187201429253
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "12thD/ko-Llama-3-8B-sft-v0.1",
+        "model_sha": "5aca578ff0479831b5417ce031693c1f97899620",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
12thD/ko-Llama-3-8B-sft-v0.3/result_2024-05-02 01:25:36.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.34044368600682595,
+            "acc_stderr": 0.013847460518892976,
+            "acc_norm": 0.4044368600682594,
+            "acc_norm_stderr": 0.014342036483436175
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.37183827922724555,
+            "acc_stderr": 0.004823078145064962,
+            "acc_norm": 0.4825731925911173,
+            "acc_norm_stderr": 0.004986749760948692
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.5614035087719298,
+            "acc_stderr": 0.038057975055904594,
+            "acc_norm": 0.5614035087719298,
+            "acc_norm_stderr": 0.038057975055904594
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.5922330097087378,
+            "acc_stderr": 0.04865777570410769,
+            "acc_norm": 0.5922330097087378,
+            "acc_norm_stderr": 0.04865777570410769
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.4878671775223499,
+            "acc_stderr": 0.017874698667491345,
+            "acc_norm": 0.4878671775223499,
+            "acc_norm_stderr": 0.017874698667491345
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.4074074074074074,
+            "acc_stderr": 0.042446332383532286,
+            "acc_norm": 0.4074074074074074,
+            "acc_norm_stderr": 0.042446332383532286
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.32,
+            "acc_stderr": 0.046882617226215034,
+            "acc_norm": 0.32,
+            "acc_norm_stderr": 0.046882617226215034
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.43829787234042555,
+            "acc_stderr": 0.03243618636108102,
+            "acc_norm": 0.43829787234042555,
+            "acc_norm_stderr": 0.03243618636108102
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.3795180722891566,
+            "acc_stderr": 0.037777988227480165,
+            "acc_norm": 0.3795180722891566,
+            "acc_norm_stderr": 0.037777988227480165
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.5337620578778135,
+            "acc_stderr": 0.0283332771095628,
+            "acc_norm": 0.5337620578778135,
+            "acc_norm_stderr": 0.0283332771095628
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.43946188340807174,
+            "acc_stderr": 0.03331092511038179,
+            "acc_norm": 0.43946188340807174,
+            "acc_norm_stderr": 0.03331092511038179
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.5419847328244275,
+            "acc_stderr": 0.04369802690578757,
+            "acc_norm": 0.5419847328244275,
+            "acc_norm_stderr": 0.04369802690578757
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.44,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.44,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.5707070707070707,
+            "acc_stderr": 0.035265527246011986,
+            "acc_norm": 0.5707070707070707,
+            "acc_norm_stderr": 0.035265527246011986
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.5793103448275863,
+            "acc_stderr": 0.0411391498118926,
+            "acc_norm": 0.5793103448275863,
+            "acc_norm_stderr": 0.0411391498118926
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.29411764705882354,
+            "acc_stderr": 0.04533838195929776,
+            "acc_norm": 0.29411764705882354,
+            "acc_norm_stderr": 0.04533838195929776
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.5546218487394958,
+            "acc_stderr": 0.03228410626716391,
+            "acc_norm": 0.5546218487394958,
+            "acc_norm_stderr": 0.03228410626716391
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.5,
+            "acc_stderr": 0.02535100632816969,
+            "acc_norm": 0.5,
+            "acc_norm_stderr": 0.02535100632816969
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.62,
+            "acc_stderr": 0.048783173121456316,
+            "acc_norm": 0.62,
+            "acc_norm_stderr": 0.048783173121456316
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.39,
+            "acc_stderr": 0.04902071300001974,
+            "acc_norm": 0.39,
+            "acc_norm_stderr": 0.04902071300001974
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.6018518518518519,
+            "acc_stderr": 0.04732332615978814,
+            "acc_norm": 0.6018518518518519,
+            "acc_norm_stderr": 0.04732332615978814
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.42857142857142855,
+            "acc_stderr": 0.034819048444388045,
+            "acc_norm": 0.42857142857142855,
+            "acc_norm_stderr": 0.034819048444388045
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.5129032258064516,
+            "acc_stderr": 0.02843453315268186,
+            "acc_norm": 0.5129032258064516,
+            "acc_norm_stderr": 0.02843453315268186
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.7350427350427351,
+            "acc_stderr": 0.028911208802749472,
+            "acc_norm": 0.7350427350427351,
+            "acc_norm_stderr": 0.028911208802749472
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.5018867924528302,
+            "acc_stderr": 0.030772653642075664,
+            "acc_norm": 0.5018867924528302,
+            "acc_norm_stderr": 0.030772653642075664
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.4909090909090909,
+            "acc_stderr": 0.04788339768702861,
+            "acc_norm": 0.4909090909090909,
+            "acc_norm_stderr": 0.04788339768702861
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.37037037037037035,
+            "acc_stderr": 0.02944316932303154,
+            "acc_norm": 0.37037037037037035,
+            "acc_norm_stderr": 0.02944316932303154
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.36423841059602646,
+            "acc_stderr": 0.03929111781242741,
+            "acc_norm": 0.36423841059602646,
+            "acc_norm_stderr": 0.03929111781242741
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.6467661691542289,
+            "acc_stderr": 0.03379790611796777,
+            "acc_norm": 0.6467661691542289,
+            "acc_norm_stderr": 0.03379790611796777
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.4161849710982659,
+            "acc_stderr": 0.037585177754049466,
+            "acc_norm": 0.4161849710982659,
+            "acc_norm_stderr": 0.037585177754049466
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.36507936507936506,
+            "acc_stderr": 0.02479606060269995,
+            "acc_norm": 0.36507936507936506,
+            "acc_norm_stderr": 0.02479606060269995
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.4930555555555556,
+            "acc_stderr": 0.04180806750294938,
+            "acc_norm": 0.4930555555555556,
+            "acc_norm_stderr": 0.04180806750294938
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.43,
+            "acc_stderr": 0.049756985195624284,
+            "acc_norm": 0.43,
+            "acc_norm_stderr": 0.049756985195624284
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.62,
+            "acc_stderr": 0.04878317312145634,
+            "acc_norm": 0.62,
+            "acc_norm_stderr": 0.04878317312145634
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.5317919075144508,
+            "acc_stderr": 0.026864624366756656,
+            "acc_norm": 0.5317919075144508,
+            "acc_norm_stderr": 0.026864624366756656
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.44171779141104295,
+            "acc_stderr": 0.03901591825836184,
+            "acc_norm": 0.44171779141104295,
+            "acc_norm_stderr": 0.03901591825836184
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.49382716049382713,
+            "acc_stderr": 0.027818623962583295,
+            "acc_norm": 0.49382716049382713,
+            "acc_norm_stderr": 0.027818623962583295
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.047937248544110196,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.047937248544110196
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.5440414507772021,
+            "acc_stderr": 0.03594413711272437,
+            "acc_norm": 0.5440414507772021,
+            "acc_norm_stderr": 0.03594413711272437
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.3684210526315789,
+            "acc_stderr": 0.04537815354939391,
+            "acc_norm": 0.3684210526315789,
+            "acc_norm_stderr": 0.04537815354939391
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.5871559633027523,
+            "acc_stderr": 0.021109128133413913,
+            "acc_norm": 0.5871559633027523,
+            "acc_norm_stderr": 0.021109128133413913
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.373015873015873,
+            "acc_stderr": 0.04325506042017086,
+            "acc_norm": 0.373015873015873,
+            "acc_norm_stderr": 0.04325506042017086
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.5065359477124183,
+            "acc_stderr": 0.028627470550556047,
+            "acc_norm": 0.5065359477124183,
+            "acc_norm_stderr": 0.028627470550556047
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.48,
+            "acc_stderr": 0.050211673156867795,
+            "acc_norm": 0.48,
+            "acc_norm_stderr": 0.050211673156867795
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.6942148760330579,
+            "acc_stderr": 0.04205953933884124,
+            "acc_norm": 0.6942148760330579,
+            "acc_norm_stderr": 0.04205953933884124
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.47368421052631576,
+            "acc_stderr": 0.04063302731486671,
+            "acc_norm": 0.47368421052631576,
+            "acc_norm_stderr": 0.04063302731486671
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.4019607843137255,
+            "acc_stderr": 0.01983517648437538,
+            "acc_norm": 0.4019607843137255,
+            "acc_norm_stderr": 0.01983517648437538
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.3049645390070922,
+            "acc_stderr": 0.027464708442022128,
+            "acc_norm": 0.3049645390070922,
+            "acc_norm_stderr": 0.027464708442022128
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.3482142857142857,
+            "acc_stderr": 0.04521829902833585,
+            "acc_norm": 0.3482142857142857,
+            "acc_norm_stderr": 0.04521829902833585
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.46296296296296297,
+            "acc_stderr": 0.03400603625538272,
+            "acc_norm": 0.46296296296296297,
+            "acc_norm_stderr": 0.03400603625538272
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.24581005586592178,
+            "acc_stderr": 0.014400296429225612,
+            "acc_norm": 0.24581005586592178,
+            "acc_norm_stderr": 0.014400296429225612
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.44,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.44,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.57,
+            "acc_stderr": 0.04975698519562428,
+            "acc_norm": 0.57,
+            "acc_norm_stderr": 0.04975698519562428
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.4264705882352941,
+            "acc_stderr": 0.03004261583271487,
+            "acc_norm": 0.4264705882352941,
+            "acc_norm_stderr": 0.03004261583271487
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.6081632653061224,
+            "acc_stderr": 0.031251275910891656,
+            "acc_norm": 0.6081632653061224,
+            "acc_norm_stderr": 0.031251275910891656
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.6033755274261603,
+            "acc_stderr": 0.031843998738112236,
+            "acc_norm": 0.6033755274261603,
+            "acc_norm_stderr": 0.031843998738112236
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.34876140808344197,
+            "acc_stderr": 0.012172035157127115,
+            "acc_norm": 0.34876140808344197,
+            "acc_norm_stderr": 0.012172035157127115
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.5343137254901961,
+            "acc_stderr": 0.03501038327635897,
+            "acc_norm": 0.5343137254901961,
+            "acc_norm_stderr": 0.03501038327635897
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.5757575757575758,
+            "acc_stderr": 0.038592681420702636,
+            "acc_norm": 0.5757575757575758,
+            "acc_norm_stderr": 0.038592681420702636
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.3157894736842105,
+            "mc1_stderr": 0.01627228795791694,
+            "mc2": 0.4887242465522298,
+            "mc2_stderr": 0.015611726455962618
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.48760330578512395,
+            "acc_stderr": 0.017185069732676528,
+            "acc_norm": 0.5478158205430933,
+            "acc_norm_stderr": 0.017111567130916796
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "12thD/ko-Llama-3-8B-sft-v0.3",
+        "model_sha": "134a44b329a37805306c77e45e932d839cae8baa",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
12thD/ko-gemma-7b-sft-v1.5/result_2024-04-03 05:50:30.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.20051194539249148,
+            "acc_stderr": 0.011700318050499368,
+            "acc_norm": 0.2645051194539249,
+            "acc_norm_stderr": 0.01288927294931337
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.2597092212706632,
+            "acc_stderr": 0.004375788991216851,
+            "acc_norm": 0.261700856403107,
+            "acc_norm_stderr": 0.0043866225891190805
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.2046783625730994,
+            "acc_stderr": 0.03094445977853321,
+            "acc_norm": 0.2046783625730994,
+            "acc_norm_stderr": 0.03094445977853321
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.17475728155339806,
+            "acc_stderr": 0.037601780060266196,
+            "acc_norm": 0.17475728155339806,
+            "acc_norm_stderr": 0.037601780060266196
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.21583652618135377,
+            "acc_stderr": 0.014711684386139946,
+            "acc_norm": 0.21583652618135377,
+            "acc_norm_stderr": 0.014711684386139946
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.22962962962962963,
+            "acc_stderr": 0.03633384414073462,
+            "acc_norm": 0.22962962962962963,
+            "acc_norm_stderr": 0.03633384414073462
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.21,
+            "acc_stderr": 0.040936018074033256,
+            "acc_norm": 0.21,
+            "acc_norm_stderr": 0.040936018074033256
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.20851063829787234,
+            "acc_stderr": 0.026556982117838746,
+            "acc_norm": 0.20851063829787234,
+            "acc_norm_stderr": 0.026556982117838746
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.21084337349397592,
+            "acc_stderr": 0.0317555478662992,
+            "acc_norm": 0.21084337349397592,
+            "acc_norm_stderr": 0.0317555478662992
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.2990353697749196,
+            "acc_stderr": 0.02600330111788514,
+            "acc_norm": 0.2990353697749196,
+            "acc_norm_stderr": 0.02600330111788514
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.2825112107623318,
+            "acc_stderr": 0.03021683101150878,
+            "acc_norm": 0.2825112107623318,
+            "acc_norm_stderr": 0.03021683101150878
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.25190839694656486,
+            "acc_stderr": 0.038073871163060866,
+            "acc_norm": 0.25190839694656486,
+            "acc_norm_stderr": 0.038073871163060866
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.23,
+            "acc_stderr": 0.04229525846816505,
+            "acc_norm": 0.23,
+            "acc_norm_stderr": 0.04229525846816505
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.23737373737373738,
+            "acc_stderr": 0.030313710538198885,
+            "acc_norm": 0.23737373737373738,
+            "acc_norm_stderr": 0.030313710538198885
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.2620689655172414,
+            "acc_stderr": 0.036646663372252565,
+            "acc_norm": 0.2620689655172414,
+            "acc_norm_stderr": 0.036646663372252565
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.1568627450980392,
+            "acc_stderr": 0.036186648199362445,
+            "acc_norm": 0.1568627450980392,
+            "acc_norm_stderr": 0.036186648199362445
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.25630252100840334,
+            "acc_stderr": 0.02835962087053395,
+            "acc_norm": 0.25630252100840334,
+            "acc_norm_stderr": 0.02835962087053395
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.31794871794871793,
+            "acc_stderr": 0.02361088430892786,
+            "acc_norm": 0.31794871794871793,
+            "acc_norm_stderr": 0.02361088430892786
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.19,
+            "acc_stderr": 0.03942772444036625,
+            "acc_norm": 0.19,
+            "acc_norm_stderr": 0.03942772444036625
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.2,
+            "acc_stderr": 0.04020151261036846,
+            "acc_norm": 0.2,
+            "acc_norm_stderr": 0.04020151261036846
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.21296296296296297,
+            "acc_stderr": 0.03957835471980981,
+            "acc_norm": 0.21296296296296297,
+            "acc_norm_stderr": 0.03957835471980981
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.28078817733990147,
+            "acc_stderr": 0.03161856335358609,
+            "acc_norm": 0.28078817733990147,
+            "acc_norm_stderr": 0.03161856335358609
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.3161290322580645,
+            "acc_stderr": 0.026450874489042764,
+            "acc_norm": 0.3161290322580645,
+            "acc_norm_stderr": 0.026450874489042764
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.19658119658119658,
+            "acc_stderr": 0.02603538609895129,
+            "acc_norm": 0.19658119658119658,
+            "acc_norm_stderr": 0.02603538609895129
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.2830188679245283,
+            "acc_stderr": 0.027724236492700904,
+            "acc_norm": 0.2830188679245283,
+            "acc_norm_stderr": 0.027724236492700904
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.24545454545454545,
+            "acc_stderr": 0.041220665028782834,
+            "acc_norm": 0.24545454545454545,
+            "acc_norm_stderr": 0.041220665028782834
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.29259259259259257,
+            "acc_stderr": 0.027738969632176088,
+            "acc_norm": 0.29259259259259257,
+            "acc_norm_stderr": 0.027738969632176088
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.32450331125827814,
+            "acc_stderr": 0.03822746937658754,
+            "acc_norm": 0.32450331125827814,
+            "acc_norm_stderr": 0.03822746937658754
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.26865671641791045,
+            "acc_stderr": 0.03134328358208954,
+            "acc_norm": 0.26865671641791045,
+            "acc_norm_stderr": 0.03134328358208954
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.32947976878612717,
+            "acc_stderr": 0.035839017547364134,
+            "acc_norm": 0.32947976878612717,
+            "acc_norm_stderr": 0.035839017547364134
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.2566137566137566,
+            "acc_stderr": 0.022494510767503154,
+            "acc_norm": 0.2566137566137566,
+            "acc_norm_stderr": 0.022494510767503154
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.2916666666666667,
+            "acc_stderr": 0.038009680605548574,
+            "acc_norm": 0.2916666666666667,
+            "acc_norm_stderr": 0.038009680605548574
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.41,
+            "acc_stderr": 0.049431107042371025,
+            "acc_norm": 0.41,
+            "acc_norm_stderr": 0.049431107042371025
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.27,
+            "acc_stderr": 0.04461960433384741,
+            "acc_norm": 0.27,
+            "acc_norm_stderr": 0.04461960433384741
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.21965317919075145,
+            "acc_stderr": 0.022289638852617897,
+            "acc_norm": 0.21965317919075145,
+            "acc_norm_stderr": 0.022289638852617897
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.25766871165644173,
+            "acc_stderr": 0.03436150827846917,
+            "acc_norm": 0.25766871165644173,
+            "acc_norm_stderr": 0.03436150827846917
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.23148148148148148,
+            "acc_stderr": 0.023468429832451138,
+            "acc_norm": 0.23148148148148148,
+            "acc_norm_stderr": 0.023468429832451138
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.28,
+            "acc_stderr": 0.045126085985421296,
+            "acc_norm": 0.28,
+            "acc_norm_stderr": 0.045126085985421296
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.3626943005181347,
+            "acc_stderr": 0.034697137917043715,
+            "acc_norm": 0.3626943005181347,
+            "acc_norm_stderr": 0.034697137917043715
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.2894736842105263,
+            "acc_stderr": 0.04266339443159394,
+            "acc_norm": 0.2894736842105263,
+            "acc_norm_stderr": 0.04266339443159394
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.3284403669724771,
+            "acc_stderr": 0.020135902797298395,
+            "acc_norm": 0.3284403669724771,
+            "acc_norm_stderr": 0.020135902797298395
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.36507936507936506,
+            "acc_stderr": 0.04306241259127153,
+            "acc_norm": 0.36507936507936506,
+            "acc_norm_stderr": 0.04306241259127153
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.2222222222222222,
+            "acc_stderr": 0.023805186524888156,
+            "acc_norm": 0.2222222222222222,
+            "acc_norm_stderr": 0.023805186524888156
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.17,
+            "acc_stderr": 0.03775251680686371,
+            "acc_norm": 0.17,
+            "acc_norm_stderr": 0.03775251680686371
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.32231404958677684,
+            "acc_stderr": 0.04266416363352168,
270 |
+
"acc_norm": 0.32231404958677684,
|
271 |
+
"acc_norm_stderr": 0.04266416363352168
|
272 |
+
},
|
273 |
+
"harness|ko_mmlu_astronomy|5": {
|
274 |
+
"acc": 0.3355263157894737,
|
275 |
+
"acc_stderr": 0.038424985593952694,
|
276 |
+
"acc_norm": 0.3355263157894737,
|
277 |
+
"acc_norm_stderr": 0.038424985593952694
|
278 |
+
},
|
279 |
+
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
+
"acc": 0.2173202614379085,
|
281 |
+
"acc_stderr": 0.016684820929148598,
|
282 |
+
"acc_norm": 0.2173202614379085,
|
283 |
+
"acc_norm_stderr": 0.016684820929148598
|
284 |
+
},
|
285 |
+
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
+
"acc": 0.2624113475177305,
|
287 |
+
"acc_stderr": 0.026244920349843,
|
288 |
+
"acc_norm": 0.2624113475177305,
|
289 |
+
"acc_norm_stderr": 0.026244920349843
|
290 |
+
},
|
291 |
+
"harness|ko_mmlu_machine_learning|5": {
|
292 |
+
"acc": 0.1875,
|
293 |
+
"acc_stderr": 0.0370468111477387,
|
294 |
+
"acc_norm": 0.1875,
|
295 |
+
"acc_norm_stderr": 0.0370468111477387
|
296 |
+
},
|
297 |
+
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
+
"acc": 0.4722222222222222,
|
299 |
+
"acc_stderr": 0.0340470532865388,
|
300 |
+
"acc_norm": 0.4722222222222222,
|
301 |
+
"acc_norm_stderr": 0.0340470532865388
|
302 |
+
},
|
303 |
+
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
+
"acc": 0.27262569832402234,
|
305 |
+
"acc_stderr": 0.014893391735249608,
|
306 |
+
"acc_norm": 0.27262569832402234,
|
307 |
+
"acc_norm_stderr": 0.014893391735249608
|
308 |
+
},
|
309 |
+
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
+
"acc": 0.33,
|
311 |
+
"acc_stderr": 0.04725815626252604,
|
312 |
+
"acc_norm": 0.33,
|
313 |
+
"acc_norm_stderr": 0.04725815626252604
|
314 |
+
},
|
315 |
+
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
+
"acc": 0.18,
|
317 |
+
"acc_stderr": 0.038612291966536934,
|
318 |
+
"acc_norm": 0.18,
|
319 |
+
"acc_norm_stderr": 0.038612291966536934
|
320 |
+
},
|
321 |
+
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
+
"acc": 0.4485294117647059,
|
323 |
+
"acc_stderr": 0.030211479609121593,
|
324 |
+
"acc_norm": 0.4485294117647059,
|
325 |
+
"acc_norm_stderr": 0.030211479609121593
|
326 |
+
},
|
327 |
+
"harness|ko_mmlu_security_studies|5": {
|
328 |
+
"acc": 0.4,
|
329 |
+
"acc_stderr": 0.03136250240935892,
|
330 |
+
"acc_norm": 0.4,
|
331 |
+
"acc_norm_stderr": 0.03136250240935892
|
332 |
+
},
|
333 |
+
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
+
"acc": 0.2616033755274262,
|
335 |
+
"acc_stderr": 0.028609516716994934,
|
336 |
+
"acc_norm": 0.2616033755274262,
|
337 |
+
"acc_norm_stderr": 0.028609516716994934
|
338 |
+
},
|
339 |
+
"harness|ko_mmlu_professional_law|5": {
|
340 |
+
"acc": 0.2470664928292047,
|
341 |
+
"acc_stderr": 0.011015752255279341,
|
342 |
+
"acc_norm": 0.2470664928292047,
|
343 |
+
"acc_norm_stderr": 0.011015752255279341
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.25980392156862747,
|
347 |
+
"acc_stderr": 0.030778554678693257,
|
348 |
+
"acc_norm": 0.25980392156862747,
|
349 |
+
"acc_norm_stderr": 0.030778554678693257
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.21818181818181817,
|
353 |
+
"acc_stderr": 0.03225078108306289,
|
354 |
+
"acc_norm": 0.21818181818181817,
|
355 |
+
"acc_norm_stderr": 0.03225078108306289
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.24969400244798043,
|
359 |
+
"mc1_stderr": 0.015152286907148125,
|
360 |
+
"mc2": 0.41054014787859444,
|
361 |
+
"mc2_stderr": 0.016235535860246012
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.20425029515938606,
|
365 |
+
"acc_stderr": 0.01386067587817683,
|
366 |
+
"acc_norm": 0.2632821723730815,
|
367 |
+
"acc_norm_stderr": 0.01514175219957321
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "12thD/ko-gemma-7b-sft-v1.5",
|
436 |
+
"model_sha": "917d34440057e05d95620548d7b3b575d95d355a",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.39505119453924914,
+            "acc_stderr": 0.014285898292938165,
+            "acc_norm": 0.4445392491467577,
+            "acc_norm_stderr": 0.014521226405627077
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.41545508862776337,
+            "acc_stderr": 0.004917931778593191,
+            "acc_norm": 0.5571599283011353,
+            "acc_norm_stderr": 0.004957068377516512
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.47953216374269003,
+            "acc_stderr": 0.0383161053282193,
+            "acc_norm": 0.47953216374269003,
+            "acc_norm_stderr": 0.0383161053282193
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.4174757281553398,
+            "acc_stderr": 0.048828405482122375,
+            "acc_norm": 0.4174757281553398,
+            "acc_norm_stderr": 0.048828405482122375
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.5108556832694764,
+            "acc_stderr": 0.017875748840242407,
+            "acc_norm": 0.5108556832694764,
+            "acc_norm_stderr": 0.017875748840242407
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.4222222222222222,
+            "acc_stderr": 0.04266763404099582,
+            "acc_norm": 0.4222222222222222,
+            "acc_norm_stderr": 0.04266763404099582
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.25,
+            "acc_stderr": 0.04351941398892446,
+            "acc_norm": 0.25,
+            "acc_norm_stderr": 0.04351941398892446
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.32340425531914896,
+            "acc_stderr": 0.03057944277361033,
+            "acc_norm": 0.32340425531914896,
+            "acc_norm_stderr": 0.03057944277361033
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.41566265060240964,
+            "acc_stderr": 0.03836722176598053,
+            "acc_norm": 0.41566265060240964,
+            "acc_norm_stderr": 0.03836722176598053
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.4758842443729904,
+            "acc_stderr": 0.02836504154256457,
+            "acc_norm": 0.4758842443729904,
+            "acc_norm_stderr": 0.02836504154256457
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.3811659192825112,
+            "acc_stderr": 0.03259625118416828,
+            "acc_norm": 0.3811659192825112,
+            "acc_norm_stderr": 0.03259625118416828
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.46564885496183206,
+            "acc_stderr": 0.043749285605997376,
+            "acc_norm": 0.46564885496183206,
+            "acc_norm_stderr": 0.043749285605997376
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.4696969696969697,
+            "acc_stderr": 0.03555804051763929,
+            "acc_norm": 0.4696969696969697,
+            "acc_norm_stderr": 0.03555804051763929
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.4068965517241379,
+            "acc_stderr": 0.04093793981266237,
+            "acc_norm": 0.4068965517241379,
+            "acc_norm_stderr": 0.04093793981266237
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.2549019607843137,
+            "acc_stderr": 0.043364327079931785,
+            "acc_norm": 0.2549019607843137,
+            "acc_norm_stderr": 0.043364327079931785
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.36134453781512604,
+            "acc_stderr": 0.031204691225150013,
+            "acc_norm": 0.36134453781512604,
+            "acc_norm_stderr": 0.031204691225150013
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.3564102564102564,
+            "acc_stderr": 0.02428314052946728,
+            "acc_norm": 0.3564102564102564,
+            "acc_norm_stderr": 0.02428314052946728
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.47,
+            "acc_stderr": 0.050161355804659205,
+            "acc_norm": 0.47,
+            "acc_norm_stderr": 0.050161355804659205
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.047937248544110196,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.047937248544110196
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.4166666666666667,
+            "acc_stderr": 0.04766075165356461,
+            "acc_norm": 0.4166666666666667,
+            "acc_norm_stderr": 0.04766075165356461
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.3448275862068966,
+            "acc_stderr": 0.03344283744280459,
+            "acc_norm": 0.3448275862068966,
+            "acc_norm_stderr": 0.03344283744280459
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.4483870967741935,
+            "acc_stderr": 0.028292056830112735,
+            "acc_norm": 0.4483870967741935,
+            "acc_norm_stderr": 0.028292056830112735
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.6068376068376068,
+            "acc_stderr": 0.03199957924651047,
+            "acc_norm": 0.6068376068376068,
+            "acc_norm_stderr": 0.03199957924651047
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.43018867924528303,
+            "acc_stderr": 0.030471445867183238,
+            "acc_norm": 0.43018867924528303,
+            "acc_norm_stderr": 0.030471445867183238
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.4909090909090909,
+            "acc_stderr": 0.04788339768702861,
+            "acc_norm": 0.4909090909090909,
+            "acc_norm_stderr": 0.04788339768702861
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.2518518518518518,
+            "acc_stderr": 0.026466117538959916,
+            "acc_norm": 0.2518518518518518,
+            "acc_norm_stderr": 0.026466117538959916
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.26490066225165565,
+            "acc_stderr": 0.03603038545360384,
+            "acc_norm": 0.26490066225165565,
+            "acc_norm_stderr": 0.03603038545360384
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.5124378109452736,
+            "acc_stderr": 0.0353443984853958,
+            "acc_norm": 0.5124378109452736,
+            "acc_norm_stderr": 0.0353443984853958
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.3583815028901734,
+            "acc_stderr": 0.03656343653353159,
+            "acc_norm": 0.3583815028901734,
+            "acc_norm_stderr": 0.03656343653353159
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.30687830687830686,
+            "acc_stderr": 0.023752928712112126,
+            "acc_norm": 0.30687830687830686,
+            "acc_norm_stderr": 0.023752928712112126
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.2986111111111111,
+            "acc_stderr": 0.03827052357950756,
+            "acc_norm": 0.2986111111111111,
+            "acc_norm_stderr": 0.03827052357950756
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.22,
+            "acc_stderr": 0.04163331998932269,
+            "acc_norm": 0.22,
+            "acc_norm_stderr": 0.04163331998932269
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.53,
+            "acc_stderr": 0.050161355804659205,
+            "acc_norm": 0.53,
+            "acc_norm_stderr": 0.050161355804659205
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.44508670520231214,
+            "acc_stderr": 0.02675625512966377,
+            "acc_norm": 0.44508670520231214,
+            "acc_norm_stderr": 0.02675625512966377
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.3558282208588957,
+            "acc_stderr": 0.03761521380046734,
+            "acc_norm": 0.3558282208588957,
+            "acc_norm_stderr": 0.03761521380046734
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.44135802469135804,
+            "acc_stderr": 0.027628737155668777,
+            "acc_norm": 0.44135802469135804,
+            "acc_norm_stderr": 0.027628737155668777
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.24,
+            "acc_stderr": 0.04292346959909283,
+            "acc_norm": 0.24,
+            "acc_norm_stderr": 0.04292346959909283
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.40414507772020725,
+            "acc_stderr": 0.0354150857888402,
+            "acc_norm": 0.40414507772020725,
+            "acc_norm_stderr": 0.0354150857888402
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.2631578947368421,
+            "acc_stderr": 0.041424397194893624,
+            "acc_norm": 0.2631578947368421,
+            "acc_norm_stderr": 0.041424397194893624
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.4091743119266055,
+            "acc_stderr": 0.02108067026443373,
+            "acc_norm": 0.4091743119266055,
+            "acc_norm_stderr": 0.02108067026443373
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.1984126984126984,
+            "acc_stderr": 0.03567016675276863,
+            "acc_norm": 0.1984126984126984,
+            "acc_norm_stderr": 0.03567016675276863
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.39869281045751637,
+            "acc_stderr": 0.02803609227389177,
+            "acc_norm": 0.39869281045751637,
+            "acc_norm_stderr": 0.02803609227389177
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.42,
+            "acc_stderr": 0.049604496374885836,
+            "acc_norm": 0.42,
+            "acc_norm_stderr": 0.049604496374885836
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.6033057851239669,
+            "acc_stderr": 0.04465869780531009,
+            "acc_norm": 0.6033057851239669,
+            "acc_norm_stderr": 0.04465869780531009
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.4342105263157895,
+            "acc_stderr": 0.040335656678483184,
+            "acc_norm": 0.4342105263157895,
+            "acc_norm_stderr": 0.040335656678483184
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.3333333333333333,
+            "acc_stderr": 0.0190709855896875,
+            "acc_norm": 0.3333333333333333,
+            "acc_norm_stderr": 0.0190709855896875
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.3191489361702128,
+            "acc_stderr": 0.027807990141320207,
+            "acc_norm": 0.3191489361702128,
+            "acc_norm_stderr": 0.027807990141320207
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.19642857142857142,
+            "acc_stderr": 0.03770970049347019,
+            "acc_norm": 0.19642857142857142,
+            "acc_norm_stderr": 0.03770970049347019
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.25462962962962965,
+            "acc_stderr": 0.02971127586000534,
+            "acc_norm": 0.25462962962962965,
+            "acc_norm_stderr": 0.02971127586000534
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.2424581005586592,
+            "acc_stderr": 0.01433352205921789,
+            "acc_norm": 0.2424581005586592,
+            "acc_norm_stderr": 0.01433352205921789
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.31,
+            "acc_stderr": 0.04648231987117316,
+            "acc_norm": 0.31,
+            "acc_norm_stderr": 0.04648231987117316
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.44,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.44,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.22426470588235295,
+            "acc_stderr": 0.02533684856333237,
+            "acc_norm": 0.22426470588235295,
+            "acc_norm_stderr": 0.02533684856333237
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.4163265306122449,
+            "acc_stderr": 0.03155782816556164,
+            "acc_norm": 0.4163265306122449,
+            "acc_norm_stderr": 0.03155782816556164
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.41350210970464135,
+            "acc_stderr": 0.03205649904851859,
+            "acc_norm": 0.41350210970464135,
+            "acc_norm_stderr": 0.03205649904851859
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.29335071707953064,
+            "acc_stderr": 0.011628520449582076,
+            "acc_norm": 0.29335071707953064,
+            "acc_norm_stderr": 0.011628520449582076
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.3627450980392157,
+            "acc_stderr": 0.033744993563193555,
+            "acc_norm": 0.3627450980392157,
+            "acc_norm_stderr": 0.033744993563193555
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.43636363636363634,
+            "acc_stderr": 0.03872592983524754,
+            "acc_norm": 0.43636363636363634,
+            "acc_norm_stderr": 0.03872592983524754
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.2962056303549572,
+            "mc1_stderr": 0.01598359510181139,
+            "mc2": 0.4602391231259313,
+            "mc2_stderr": 0.015191570633369808
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.4757969303423849,
+            "acc_stderr": 0.017170202466520748,
+            "acc_norm": 0.5454545454545454,
+            "acc_norm_stderr": 0.017119172208061504
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "42MARU/GenAI-llama-2-ko-en-instruct-v1",
+        "model_sha": "aee07500d61a1d5d214cf0bc0040650957cf3da0",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
42MARU/GenAI-llama2-ko-en-dpo-13b-test3/result_2023-11-30 08:08:14.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.4112627986348123,
+            "acc_stderr": 0.014379441068522077,
+            "acc_norm": 0.45733788395904434,
+            "acc_norm_stderr": 0.014558106543924067
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.43288189603664606,
+            "acc_stderr": 0.004944620712318274,
+            "acc_norm": 0.5816570404301932,
+            "acc_norm_stderr": 0.004922789247319874
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.5321637426900585,
+            "acc_stderr": 0.038268824176603704,
+            "acc_norm": 0.5321637426900585,
+            "acc_norm_stderr": 0.038268824176603704
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.5242718446601942,
+            "acc_stderr": 0.049449010929737795,
+            "acc_norm": 0.5242718446601942,
+            "acc_norm_stderr": 0.049449010929737795
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.5517241379310345,
+            "acc_stderr": 0.01778403453499242,
+            "acc_norm": 0.5517241379310345,
+            "acc_norm_stderr": 0.01778403453499242
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.4074074074074074,
+            "acc_stderr": 0.0424463323835323,
+            "acc_norm": 0.4074074074074074,
+            "acc_norm_stderr": 0.0424463323835323
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.27,
+            "acc_stderr": 0.04461960433384741,
+            "acc_norm": 0.27,
+            "acc_norm_stderr": 0.04461960433384741
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.40425531914893614,
+            "acc_stderr": 0.03208115750788684,
+            "acc_norm": 0.40425531914893614,
+            "acc_norm_stderr": 0.03208115750788684
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.43373493975903615,
+            "acc_stderr": 0.03858158940685515,
+            "acc_norm": 0.43373493975903615,
+            "acc_norm_stderr": 0.03858158940685515
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.4919614147909968,
+            "acc_stderr": 0.028394421370984545,
+            "acc_norm": 0.4919614147909968,
+            "acc_norm_stderr": 0.028394421370984545
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.5022421524663677,
+            "acc_stderr": 0.033557465352232634,
+            "acc_norm": 0.5022421524663677,
+            "acc_norm_stderr": 0.033557465352232634
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.48091603053435117,
+            "acc_stderr": 0.04382094705550989,
+            "acc_norm": 0.48091603053435117,
+            "acc_norm_stderr": 0.04382094705550989
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.39,
+            "acc_stderr": 0.04902071300001975,
+            "acc_norm": 0.39,
+            "acc_norm_stderr": 0.04902071300001975
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.5858585858585859,
+            "acc_stderr": 0.035094383488796295,
+            "acc_norm": 0.5858585858585859,
+            "acc_norm_stderr": 0.035094383488796295
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.36551724137931035,
+            "acc_stderr": 0.040131241954243856,
+            "acc_norm": 0.36551724137931035,
+            "acc_norm_stderr": 0.040131241954243856
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.22549019607843138,
+            "acc_stderr": 0.04158307533083286,
+            "acc_norm": 0.22549019607843138,
+            "acc_norm_stderr": 0.04158307533083286
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.46638655462184875,
+            "acc_stderr": 0.03240501447690071,
+            "acc_norm": 0.46638655462184875,
+            "acc_norm_stderr": 0.03240501447690071
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.45384615384615384,
+            "acc_stderr": 0.02524277098712617,
+            "acc_norm": 0.45384615384615384,
+            "acc_norm_stderr": 0.02524277098712617
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.55,
+            "acc_stderr": 0.04999999999999999,
+            "acc_norm": 0.55,
+            "acc_norm_stderr": 0.04999999999999999
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.37,
+            "acc_stderr": 0.04852365870939099,
+            "acc_norm": 0.37,
+            "acc_norm_stderr": 0.04852365870939099
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.5370370370370371,
+            "acc_stderr": 0.04820403072760627,
+            "acc_norm": 0.5370370370370371,
+            "acc_norm_stderr": 0.04820403072760627
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.35960591133004927,
+            "acc_stderr": 0.03376458246509567,
+            "acc_norm": 0.35960591133004927,
+            "acc_norm_stderr": 0.03376458246509567
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.4612903225806452,
+            "acc_stderr": 0.02835863485983692,
+            "acc_norm": 0.4612903225806452,
+            "acc_norm_stderr": 0.02835863485983692
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.6538461538461539,
+            "acc_stderr": 0.031166957367235903,
+            "acc_norm": 0.6538461538461539,
+            "acc_norm_stderr": 0.031166957367235903
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.4490566037735849,
+            "acc_stderr": 0.030612730713641092,
+            "acc_norm": 0.4490566037735849,
+            "acc_norm_stderr": 0.030612730713641092
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.6,
+            "acc_stderr": 0.0469237132203465,
+            "acc_norm": 0.6,
+            "acc_norm_stderr": 0.0469237132203465
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.2518518518518518,
+            "acc_stderr": 0.026466117538959916,
+            "acc_norm": 0.2518518518518518,
+            "acc_norm_stderr": 0.026466117538959916
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.31125827814569534,
+            "acc_stderr": 0.03780445850526733,
+            "acc_norm": 0.31125827814569534,
+            "acc_norm_stderr": 0.03780445850526733
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.5572139303482587,
+            "acc_stderr": 0.03512310964123937,
+            "acc_norm": 0.5572139303482587,
+            "acc_norm_stderr": 0.03512310964123937
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.41040462427745666,
+            "acc_stderr": 0.03750757044895538,
+            "acc_norm": 0.41040462427745666,
+            "acc_norm_stderr": 0.03750757044895538
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.29894179894179895,
+            "acc_stderr": 0.023577604791655812,
+            "acc_norm": 0.29894179894179895,
+            "acc_norm_stderr": 0.023577604791655812
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.3958333333333333,
+            "acc_stderr": 0.04089465449325583,
+            "acc_norm": 0.3958333333333333,
+            "acc_norm_stderr": 0.04089465449325583
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.65,
+            "acc_stderr": 0.04793724854411018,
+            "acc_norm": 0.65,
+            "acc_norm_stderr": 0.04793724854411018
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.5,
+            "acc_stderr": 0.026919095102908273,
+            "acc_norm": 0.5,
+            "acc_norm_stderr": 0.026919095102908273
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.5828220858895705,
+            "acc_stderr": 0.03874102859818082,
+            "acc_norm": 0.5828220858895705,
+            "acc_norm_stderr": 0.03874102859818082
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.49691358024691357,
+            "acc_stderr": 0.027820214158594377,
+            "acc_norm": 0.49691358024691357,
+            "acc_norm_stderr": 0.027820214158594377
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.36,
+            "acc_stderr": 0.04824181513244218,
+            "acc_norm": 0.36,
+            "acc_norm_stderr": 0.04824181513244218
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.538860103626943,
+            "acc_stderr": 0.03597524411734578,
+            "acc_norm": 0.538860103626943,
+            "acc_norm_stderr": 0.03597524411734578
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.21052631578947367,
+            "acc_stderr": 0.03835153954399419,
+            "acc_norm": 0.21052631578947367,
+            "acc_norm_stderr": 0.03835153954399419
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.5926605504587156,
+            "acc_stderr": 0.02106598624441288,
+            "acc_norm": 0.5926605504587156,
+            "acc_norm_stderr": 0.02106598624441288
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.3333333333333333,
+            "acc_stderr": 0.04216370213557835,
+            "acc_norm": 0.3333333333333333,
+            "acc_norm_stderr": 0.04216370213557835
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.42483660130718953,
+            "acc_stderr": 0.02830457667314111,
+            "acc_norm": 0.42483660130718953,
+            "acc_norm_stderr": 0.02830457667314111
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.47,
+            "acc_stderr": 0.05016135580465919,
+            "acc_norm": 0.47,
+            "acc_norm_stderr": 0.05016135580465919
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.5950413223140496,
+            "acc_stderr": 0.04481137755942469,
+            "acc_norm": 0.5950413223140496,
+            "acc_norm_stderr": 0.04481137755942469
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.375,
+            "acc_stderr": 0.039397364351956274,
+            "acc_norm": 0.375,
+            "acc_norm_stderr": 0.039397364351956274
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.4215686274509804,
+            "acc_stderr": 0.01997742260022747,
+            "acc_norm": 0.4215686274509804,
+            "acc_norm_stderr": 0.01997742260022747
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.3546099290780142,
+            "acc_stderr": 0.02853865002887864,
+            "acc_norm": 0.3546099290780142,
+            "acc_norm_stderr": 0.02853865002887864
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.2767857142857143,
+            "acc_stderr": 0.04246624336697624,
+            "acc_norm": 0.2767857142857143,
+            "acc_norm_stderr": 0.04246624336697624
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.33796296296296297,
+            "acc_stderr": 0.03225941352631295,
+            "acc_norm": 0.33796296296296297,
+            "acc_norm_stderr": 0.03225941352631295
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.2435754189944134,
+            "acc_stderr": 0.01435591196476786,
+            "acc_norm": 0.2435754189944134,
+            "acc_norm_stderr": 0.01435591196476786
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.36,
+            "acc_stderr": 0.04824181513244218,
+            "acc_norm": 0.36,
+            "acc_norm_stderr": 0.04824181513244218
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.41,
+            "acc_stderr": 0.049431107042371025,
+            "acc_norm": 0.41,
+            "acc_norm_stderr": 0.049431107042371025
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.3860294117647059,
+            "acc_stderr": 0.029573269134411124,
+            "acc_norm": 0.3860294117647059,
+            "acc_norm_stderr": 0.029573269134411124
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.5061224489795918,
+            "acc_stderr": 0.03200682020163907,
+            "acc_norm": 0.5061224489795918,
+            "acc_norm_stderr": 0.03200682020163907
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.6751054852320675,
+            "acc_stderr": 0.030486039389105303,
+            "acc_norm": 0.6751054852320675,
+            "acc_norm_stderr": 0.030486039389105303
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.3774445893089961,
+            "acc_stderr": 0.012380680911165804,
+            "acc_norm": 0.3774445893089961,
+            "acc_norm_stderr": 0.012380680911165804
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.5245098039215687,
+            "acc_stderr": 0.035050931943487976,
+            "acc_norm": 0.5245098039215687,
+            "acc_norm_stderr": 0.035050931943487976
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.5575757575757576,
+            "acc_stderr": 0.03878372113711275,
+            "acc_norm": 0.5575757575757576,
+            "acc_norm_stderr": 0.03878372113711275
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.31946144430844553,
+            "mc1_stderr": 0.0163226441829605,
+            "mc2": 0.4756188079524156,
+            "mc2_stderr": 0.015396392654893808
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.5100354191263282,
+            "acc_stderr": 0.01718689128689406,
+            "acc_norm": 0.5832349468713105,
+            "acc_norm_stderr": 0.01695048914610882
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-test3",
+        "model_sha": "d70fdfed2e0b43ac6715ee5ec24801fd2bd5c25d",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.39505119453924914,
+            "acc_stderr": 0.014285898292938165,
+            "acc_norm": 0.45819112627986347,
+            "acc_norm_stderr": 0.014560220308714702
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.42561242780322645,
+            "acc_stderr": 0.004934250390879782,
+            "acc_norm": 0.569308902609042,
+            "acc_norm_stderr": 0.004941609820763589
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.47953216374269003,
+            "acc_stderr": 0.0383161053282193,
+            "acc_norm": 0.47953216374269003,
+            "acc_norm_stderr": 0.0383161053282193
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.4563106796116505,
+            "acc_stderr": 0.049318019942204146,
+            "acc_norm": 0.4563106796116505,
+            "acc_norm_stderr": 0.049318019942204146
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.5210727969348659,
+            "acc_stderr": 0.01786407678621291,
+            "acc_norm": 0.5210727969348659,
+            "acc_norm_stderr": 0.01786407678621291
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.4888888888888889,
+            "acc_stderr": 0.04318275491977976,
+            "acc_norm": 0.4888888888888889,
+            "acc_norm_stderr": 0.04318275491977976
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.40425531914893614,
+            "acc_stderr": 0.032081157507886836,
+            "acc_norm": 0.40425531914893614,
+            "acc_norm_stderr": 0.032081157507886836
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.39759036144578314,
+            "acc_stderr": 0.03809973084540218,
+            "acc_norm": 0.39759036144578314,
+            "acc_norm_stderr": 0.03809973084540218
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.4533762057877814,
+            "acc_stderr": 0.02827435985489424,
+            "acc_norm": 0.4533762057877814,
+            "acc_norm_stderr": 0.02827435985489424
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.47085201793721976,
+            "acc_stderr": 0.03350073248773403,
+            "acc_norm": 0.47085201793721976,
+            "acc_norm_stderr": 0.03350073248773403
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.42748091603053434,
+            "acc_stderr": 0.043389203057924,
+            "acc_norm": 0.42748091603053434,
+            "acc_norm_stderr": 0.043389203057924
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.42,
+            "acc_stderr": 0.049604496374885836,
+            "acc_norm": 0.42,
+            "acc_norm_stderr": 0.049604496374885836
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.5151515151515151,
+            "acc_stderr": 0.0356071651653106,
+            "acc_norm": 0.5151515151515151,
+            "acc_norm_stderr": 0.0356071651653106
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.45517241379310347,
+            "acc_stderr": 0.04149886942192117,
+            "acc_norm": 0.45517241379310347,
+            "acc_norm_stderr": 0.04149886942192117
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.16666666666666666,
+            "acc_stderr": 0.03708284662416544,
+            "acc_norm": 0.16666666666666666,
+            "acc_norm_stderr": 0.03708284662416544
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.37815126050420167,
+            "acc_stderr": 0.031499305777849054,
+            "acc_norm": 0.37815126050420167,
+            "acc_norm_stderr": 0.031499305777849054
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.37435897435897436,
+            "acc_stderr": 0.024537591572830517,
+            "acc_norm": 0.37435897435897436,
+            "acc_norm_stderr": 0.024537591572830517
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.56,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.56,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.31,
+            "acc_stderr": 0.04648231987117316,
+            "acc_norm": 0.31,
+            "acc_norm_stderr": 0.04648231987117316
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.5185185185185185,
+            "acc_stderr": 0.04830366024635331,
+            "acc_norm": 0.5185185185185185,
+            "acc_norm_stderr": 0.04830366024635331
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.4236453201970443,
+            "acc_stderr": 0.034767257476490364,
+            "acc_norm": 0.4236453201970443,
+            "acc_norm_stderr": 0.034767257476490364
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.41935483870967744,
+            "acc_stderr": 0.02807158890109185,
+            "acc_norm": 0.41935483870967744,
+            "acc_norm_stderr": 0.02807158890109185
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.6367521367521367,
+            "acc_stderr": 0.03150712523091265,
+            "acc_norm": 0.6367521367521367,
+            "acc_norm_stderr": 0.03150712523091265
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.41509433962264153,
+            "acc_stderr": 0.03032594578928611,
+            "acc_norm": 0.41509433962264153,
+            "acc_norm_stderr": 0.03032594578928611
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.4818181818181818,
+            "acc_stderr": 0.04785964010794915,
+            "acc_norm": 0.4818181818181818,
+            "acc_norm_stderr": 0.04785964010794915
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.28888888888888886,
+            "acc_stderr": 0.027634907264178544,
+            "acc_norm": 0.28888888888888886,
+            "acc_norm_stderr": 0.027634907264178544
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.32450331125827814,
+            "acc_stderr": 0.038227469376587525,
+            "acc_norm": 0.32450331125827814,
+            "acc_norm_stderr": 0.038227469376587525
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.5124378109452736,
+            "acc_stderr": 0.0353443984853958,
+            "acc_norm": 0.5124378109452736,
+            "acc_norm_stderr": 0.0353443984853958
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.3815028901734104,
+            "acc_stderr": 0.03703851193099521,
+            "acc_norm": 0.3815028901734104,
+            "acc_norm_stderr": 0.03703851193099521
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.30952380952380953,
+            "acc_stderr": 0.023809523809523857,
+            "acc_norm": 0.30952380952380953,
+            "acc_norm_stderr": 0.023809523809523857
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.3125,
+            "acc_stderr": 0.038760854559127644,
+            "acc_norm": 0.3125,
+            "acc_norm_stderr": 0.038760854559127644
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.27,
+            "acc_stderr": 0.044619604333847394,
+            "acc_norm": 0.27,
+            "acc_norm_stderr": 0.044619604333847394
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.59,
+            "acc_stderr": 0.04943110704237101,
+            "acc_norm": 0.59,
+            "acc_norm_stderr": 0.04943110704237101
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.5,
+            "acc_stderr": 0.026919095102908273,
+            "acc_norm": 0.5,
+            "acc_norm_stderr": 0.026919095102908273
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.4601226993865031,
+            "acc_stderr": 0.03915857291436971,
+            "acc_norm": 0.4601226993865031,
+            "acc_norm_stderr": 0.03915857291436971
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.4444444444444444,
+            "acc_stderr": 0.027648477877413317,
+            "acc_norm": 0.4444444444444444,
+            "acc_norm_stderr": 0.027648477877413317
|
224 |
+
},
|
225 |
+
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
+
"acc": 0.32,
|
227 |
+
"acc_stderr": 0.04688261722621504,
|
228 |
+
"acc_norm": 0.32,
|
229 |
+
"acc_norm_stderr": 0.04688261722621504
|
230 |
+
},
|
231 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
+
"acc": 0.5233160621761658,
|
233 |
+
"acc_stderr": 0.036045136724422014,
|
234 |
+
"acc_norm": 0.5233160621761658,
|
235 |
+
"acc_norm_stderr": 0.036045136724422014
|
236 |
+
},
|
237 |
+
"harness|ko_mmlu_econometrics|5": {
|
238 |
+
"acc": 0.2894736842105263,
|
239 |
+
"acc_stderr": 0.04266339443159394,
|
240 |
+
"acc_norm": 0.2894736842105263,
|
241 |
+
"acc_norm_stderr": 0.04266339443159394
|
242 |
+
},
|
243 |
+
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
+
"acc": 0.48990825688073397,
|
245 |
+
"acc_stderr": 0.021432956203453316,
|
246 |
+
"acc_norm": 0.48990825688073397,
|
247 |
+
"acc_norm_stderr": 0.021432956203453316
|
248 |
+
},
|
249 |
+
"harness|ko_mmlu_formal_logic|5": {
|
250 |
+
"acc": 0.20634920634920634,
|
251 |
+
"acc_stderr": 0.0361960452412425,
|
252 |
+
"acc_norm": 0.20634920634920634,
|
253 |
+
"acc_norm_stderr": 0.0361960452412425
|
254 |
+
},
|
255 |
+
"harness|ko_mmlu_nutrition|5": {
|
256 |
+
"acc": 0.4019607843137255,
|
257 |
+
"acc_stderr": 0.02807415894760066,
|
258 |
+
"acc_norm": 0.4019607843137255,
|
259 |
+
"acc_norm_stderr": 0.02807415894760066
|
260 |
+
},
|
261 |
+
"harness|ko_mmlu_business_ethics|5": {
|
262 |
+
"acc": 0.45,
|
263 |
+
"acc_stderr": 0.05,
|
264 |
+
"acc_norm": 0.45,
|
265 |
+
"acc_norm_stderr": 0.05
|
266 |
+
},
|
267 |
+
"harness|ko_mmlu_international_law|5": {
|
268 |
+
"acc": 0.6115702479338843,
|
269 |
+
"acc_stderr": 0.04449270350068383,
|
270 |
+
"acc_norm": 0.6115702479338843,
|
271 |
+
"acc_norm_stderr": 0.04449270350068383
|
272 |
+
},
|
273 |
+
"harness|ko_mmlu_astronomy|5": {
|
274 |
+
"acc": 0.3684210526315789,
|
275 |
+
"acc_stderr": 0.03925523381052932,
|
276 |
+
"acc_norm": 0.3684210526315789,
|
277 |
+
"acc_norm_stderr": 0.03925523381052932
|
278 |
+
},
|
279 |
+
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
+
"acc": 0.4035947712418301,
|
281 |
+
"acc_stderr": 0.01984828016840117,
|
282 |
+
"acc_norm": 0.4035947712418301,
|
283 |
+
"acc_norm_stderr": 0.01984828016840117
|
284 |
+
},
|
285 |
+
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
+
"acc": 0.2730496453900709,
|
287 |
+
"acc_stderr": 0.026577860943307854,
|
288 |
+
"acc_norm": 0.2730496453900709,
|
289 |
+
"acc_norm_stderr": 0.026577860943307854
|
290 |
+
},
|
291 |
+
"harness|ko_mmlu_machine_learning|5": {
|
292 |
+
"acc": 0.26785714285714285,
|
293 |
+
"acc_stderr": 0.04203277291467762,
|
294 |
+
"acc_norm": 0.26785714285714285,
|
295 |
+
"acc_norm_stderr": 0.04203277291467762
|
296 |
+
},
|
297 |
+
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
+
"acc": 0.24074074074074073,
|
299 |
+
"acc_stderr": 0.029157522184605593,
|
300 |
+
"acc_norm": 0.24074074074074073,
|
301 |
+
"acc_norm_stderr": 0.029157522184605593
|
302 |
+
},
|
303 |
+
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
+
"acc": 0.2424581005586592,
|
305 |
+
"acc_stderr": 0.01433352205921789,
|
306 |
+
"acc_norm": 0.2424581005586592,
|
307 |
+
"acc_norm_stderr": 0.01433352205921789
|
308 |
+
},
|
309 |
+
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
+
"acc": 0.36,
|
311 |
+
"acc_stderr": 0.04824181513244218,
|
312 |
+
"acc_norm": 0.36,
|
313 |
+
"acc_norm_stderr": 0.04824181513244218
|
314 |
+
},
|
315 |
+
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
+
"acc": 0.48,
|
317 |
+
"acc_stderr": 0.050211673156867795,
|
318 |
+
"acc_norm": 0.48,
|
319 |
+
"acc_norm_stderr": 0.050211673156867795
|
320 |
+
},
|
321 |
+
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
+
"acc": 0.3088235294117647,
|
323 |
+
"acc_stderr": 0.02806499816704009,
|
324 |
+
"acc_norm": 0.3088235294117647,
|
325 |
+
"acc_norm_stderr": 0.02806499816704009
|
326 |
+
},
|
327 |
+
"harness|ko_mmlu_security_studies|5": {
|
328 |
+
"acc": 0.3836734693877551,
|
329 |
+
"acc_stderr": 0.031130880396235922,
|
330 |
+
"acc_norm": 0.3836734693877551,
|
331 |
+
"acc_norm_stderr": 0.031130880396235922
|
332 |
+
},
|
333 |
+
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
+
"acc": 0.5654008438818565,
|
335 |
+
"acc_stderr": 0.03226759995510145,
|
336 |
+
"acc_norm": 0.5654008438818565,
|
337 |
+
"acc_norm_stderr": 0.03226759995510145
|
338 |
+
},
|
339 |
+
"harness|ko_mmlu_professional_law|5": {
|
340 |
+
"acc": 0.318122555410691,
|
341 |
+
"acc_stderr": 0.011895407281104074,
|
342 |
+
"acc_norm": 0.318122555410691,
|
343 |
+
"acc_norm_stderr": 0.011895407281104074
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.4411764705882353,
|
347 |
+
"acc_stderr": 0.03484941514429231,
|
348 |
+
"acc_norm": 0.4411764705882353,
|
349 |
+
"acc_norm_stderr": 0.03484941514429231
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.6181818181818182,
|
353 |
+
"acc_stderr": 0.03793713171165634,
|
354 |
+
"acc_norm": 0.6181818181818182,
|
355 |
+
"acc_norm_stderr": 0.03793713171165634
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.3108935128518972,
|
359 |
+
"mc1_stderr": 0.016203316673559693,
|
360 |
+
"mc2": 0.474366186048088,
|
361 |
+
"mc2_stderr": 0.01540967506791855
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.5041322314049587,
|
365 |
+
"acc_stderr": 0.017189767032130817,
|
366 |
+
"acc_norm": 0.5525383707201889,
|
367 |
+
"acc_norm_stderr": 0.017095190301500574
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-v1",
|
436 |
+
"model_sha": "13d027c0a2069284308f4992d67a202ac2e50b22",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
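Each result file in this backup follows the same three-part layout: a "results" map keyed by harness task name, a "versions" map, and a "config_general" block identifying the evaluated model. A minimal sketch of how one of these files could be loaded and summarized is shown below; it assumes a local copy of the JSON, and the load_result helper is illustrative, not part of the dataset tooling.

import json

def load_result(path):
    # Parse one backup result file; every file in this commit shares
    # the results / versions / config_general layout shown above.
    with open(path, encoding="utf-8") as f:
        data = json.load(f)
    return data["results"], data["versions"], data["config_general"]

results, versions, config = load_result(
    "42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json"
)
print(config["model_name"], config["model_sha"])

# The ko_mmlu subtasks all report acc/acc_stderr and acc_norm/acc_norm_stderr;
# a simple macro-average over the 57 subjects:
mmlu = {k: v["acc"] for k, v in results.items() if k.startswith("harness|ko_mmlu_")}
print(f"macro-average ko_mmlu acc over {len(mmlu)} subjects: {sum(mmlu.values()) / len(mmlu):.4f}")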
42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json
ADDED
@@ -0,0 +1,444 @@
{
  "results": {
    "harness|ko_arc_challenge|25": { "acc": 0.39505119453924914, "acc_stderr": 0.014285898292938165, "acc_norm": 0.46075085324232085, "acc_norm_stderr": 0.014566303676636588 },
    "harness|ko_hellaswag|10": { "acc": 0.42640908185620396, "acc_stderr": 0.004935439955031694, "acc_norm": 0.5706034654451304, "acc_norm_stderr": 0.0049397843114489855 },
    "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.0383161053282193, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.0383161053282193 },
    "harness|ko_mmlu_management|5": { "acc": 0.44660194174757284, "acc_stderr": 0.04922424153458934, "acc_norm": 0.44660194174757284, "acc_norm_stderr": 0.04922424153458934 },
    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5197956577266922, "acc_stderr": 0.017865944827291633, "acc_norm": 0.5197956577266922, "acc_norm_stderr": 0.017865944827291633 },
    "harness|ko_mmlu_anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 },
    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 },
    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4, "acc_stderr": 0.03202563076101735, "acc_norm": 0.4, "acc_norm_stderr": 0.03202563076101735 },
    "harness|ko_mmlu_virology|5": { "acc": 0.4036144578313253, "acc_stderr": 0.03819486140758398, "acc_norm": 0.4036144578313253, "acc_norm_stderr": 0.03819486140758398 },
    "harness|ko_mmlu_philosophy|5": { "acc": 0.4533762057877814, "acc_stderr": 0.028274359854894245, "acc_norm": 0.4533762057877814, "acc_norm_stderr": 0.028274359854894245 },
    "harness|ko_mmlu_human_aging|5": { "acc": 0.47533632286995514, "acc_stderr": 0.03351695167652628, "acc_norm": 0.47533632286995514, "acc_norm_stderr": 0.03351695167652628 },
    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.42748091603053434, "acc_stderr": 0.043389203057924, "acc_norm": 0.42748091603053434, "acc_norm_stderr": 0.043389203057924 },
    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 },
    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5151515151515151, "acc_stderr": 0.0356071651653106, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.0356071651653106 },
    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 },
    "harness|ko_mmlu_college_physics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03708284662416544, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03708284662416544 },
    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.37815126050420167, "acc_stderr": 0.031499305777849054, "acc_norm": 0.37815126050420167, "acc_norm_stderr": 0.031499305777849054 },
    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3717948717948718, "acc_stderr": 0.024503472557110946, "acc_norm": 0.3717948717948718, "acc_norm_stderr": 0.024503472557110946 },
    "harness|ko_mmlu_computer_security|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 },
    "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5185185185185185, "acc_stderr": 0.04830366024635331, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.04830366024635331 },
    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4236453201970443, "acc_stderr": 0.034767257476490364, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.034767257476490364 },
    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.41935483870967744, "acc_stderr": 0.02807158890109185, "acc_norm": 0.41935483870967744, "acc_norm_stderr": 0.02807158890109185 },
    "harness|ko_mmlu_marketing|5": { "acc": 0.6324786324786325, "acc_stderr": 0.03158539157745636, "acc_norm": 0.6324786324786325, "acc_norm_stderr": 0.03158539157745636 },
    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.41509433962264153, "acc_stderr": 0.03032594578928611, "acc_norm": 0.41509433962264153, "acc_norm_stderr": 0.03032594578928611 },
    "harness|ko_mmlu_public_relations|5": { "acc": 0.4909090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.04788339768702861 },
    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.02773896963217609, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.02773896963217609 },
    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525 },
    "harness|ko_mmlu_sociology|5": { "acc": 0.5124378109452736, "acc_stderr": 0.0353443984853958, "acc_norm": 0.5124378109452736, "acc_norm_stderr": 0.0353443984853958 },
    "harness|ko_mmlu_college_medicine|5": { "acc": 0.3815028901734104, "acc_stderr": 0.03703851193099521, "acc_norm": 0.3815028901734104, "acc_norm_stderr": 0.03703851193099521 },
    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30952380952380953, "acc_stderr": 0.023809523809523857, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.023809523809523857 },
    "harness|ko_mmlu_college_biology|5": { "acc": 0.3194444444444444, "acc_stderr": 0.03899073687357335, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.03899073687357335 },
    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 },
    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.59, "acc_stderr": 0.04943110704237101, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237101 },
    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5028901734104047, "acc_stderr": 0.026918645383239015, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.026918645383239015 },
    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4601226993865031, "acc_stderr": 0.03915857291436971, "acc_norm": 0.4601226993865031, "acc_norm_stderr": 0.03915857291436971 },
    "harness|ko_mmlu_prehistory|5": { "acc": 0.44753086419753085, "acc_stderr": 0.02766713856942271, "acc_norm": 0.44753086419753085, "acc_norm_stderr": 0.02766713856942271 },
    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5233160621761658, "acc_stderr": 0.036045136724422014, "acc_norm": 0.5233160621761658, "acc_norm_stderr": 0.036045136724422014 },
    "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 },
    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.4917431192660551, "acc_stderr": 0.021434399918214334, "acc_norm": 0.4917431192660551, "acc_norm_stderr": 0.021434399918214334 },
    "harness|ko_mmlu_formal_logic|5": { "acc": 0.20634920634920634, "acc_stderr": 0.0361960452412425, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.0361960452412425 },
    "harness|ko_mmlu_nutrition|5": { "acc": 0.39869281045751637, "acc_stderr": 0.02803609227389177, "acc_norm": 0.39869281045751637, "acc_norm_stderr": 0.02803609227389177 },
    "harness|ko_mmlu_business_ethics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 },
    "harness|ko_mmlu_international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.04449270350068383, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068383 },
    "harness|ko_mmlu_astronomy|5": { "acc": 0.3684210526315789, "acc_stderr": 0.03925523381052932, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.03925523381052932 },
    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4035947712418301, "acc_stderr": 0.01984828016840117, "acc_norm": 0.4035947712418301, "acc_norm_stderr": 0.01984828016840117 },
    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2730496453900709, "acc_stderr": 0.026577860943307854, "acc_norm": 0.2730496453900709, "acc_norm_stderr": 0.026577860943307854 },
    "harness|ko_mmlu_machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 },
    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.029157522184605593, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.029157522184605593 },
    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 },
    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 },
    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 },
    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3125, "acc_stderr": 0.02815637344037142, "acc_norm": 0.3125, "acc_norm_stderr": 0.02815637344037142 },
    "harness|ko_mmlu_security_studies|5": { "acc": 0.3836734693877551, "acc_stderr": 0.031130880396235922, "acc_norm": 0.3836734693877551, "acc_norm_stderr": 0.031130880396235922 },
    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.569620253164557, "acc_stderr": 0.03223017195937598, "acc_norm": 0.569620253164557, "acc_norm_stderr": 0.03223017195937598 },
    "harness|ko_mmlu_professional_law|5": { "acc": 0.31747066492829207, "acc_stderr": 0.011888892068809309, "acc_norm": 0.31747066492829207, "acc_norm_stderr": 0.011888892068809309 },
    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4411764705882353, "acc_stderr": 0.034849415144292316, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.034849415144292316 },
    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6121212121212121, "acc_stderr": 0.0380491365397101, "acc_norm": 0.6121212121212121, "acc_norm_stderr": 0.0380491365397101 },
    "harness|ko_truthfulqa_mc|0": { "mc1": 0.3084455324357405, "mc1_stderr": 0.01616803938315687, "mc2": 0.47439440606323957, "mc2_stderr": 0.015414552807155835 },
    "harness|ko_commongen_v2|2": { "acc": 0.5088547815820543, "acc_stderr": 0.01718765819933674, "acc_norm": 0.5548996458087367, "acc_norm_stderr": 0.017086417431005464 }
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-v2",
    "model_sha": "6fd9c176286458a9e802d0955a243f7b538c8e1c",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
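The v1 and v2 DPO result files above share an identical task list and versions block, so per-task deltas between the two runs can be computed directly. A minimal sketch, assuming both JSON files are available locally (paths as listed in this commit):

import json

def task_scores(path):
    # Map task name -> headline score: acc_norm where present,
    # mc2 for ko_truthfulqa_mc, which reports mc1/mc2 instead.
    with open(path, encoding="utf-8") as f:
        results = json.load(f)["results"]
    return {k: v.get("acc_norm", v.get("mc2")) for k, v in results.items()}

v1 = task_scores("42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json")
v2 = task_scores("42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json")

# Largest movements between the two checkpoints, by absolute delta.
deltas = sorted(((v2[k] - v1[k], k) for k in v1), key=lambda t: abs(t[0]), reverse=True)
for delta, task in deltas[:5]:
    print(f"{task}: {delta:+.4f}")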
42MARU/GenAI-llama2-ko-en-instruct-20B-v1/result_2023-11-05 16:07:53.json
ADDED
@@ -0,0 +1,444 @@
{
  "results": {
    "harness|ko_arc_challenge|25": { "acc": 0.3984641638225256, "acc_stderr": 0.014306946052735563, "acc_norm": 0.4616040955631399, "acc_norm_stderr": 0.01456824555029636 },
    "harness|ko_hellaswag|10": { "acc": 0.42162915753833896, "acc_stderr": 0.004928105880776079, "acc_norm": 0.5677155945030871, "acc_norm_stderr": 0.004943809330692697 },
    "harness|ko_mmlu_world_religions|5": { "acc": 0.5555555555555556, "acc_stderr": 0.038110796698335316, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.038110796698335316 },
    "harness|ko_mmlu_management|5": { "acc": 0.5436893203883495, "acc_stderr": 0.049318019942204146, "acc_norm": 0.5436893203883495, "acc_norm_stderr": 0.049318019942204146 },
    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5491698595146871, "acc_stderr": 0.01779329757269903, "acc_norm": 0.5491698595146871, "acc_norm_stderr": 0.01779329757269903 },
    "harness|ko_mmlu_anatomy|5": { "acc": 0.5037037037037037, "acc_stderr": 0.043192236258113324, "acc_norm": 0.5037037037037037, "acc_norm_stderr": 0.043192236258113324 },
    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421255, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421255 },
    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3574468085106383, "acc_stderr": 0.03132941789476425, "acc_norm": 0.3574468085106383, "acc_norm_stderr": 0.03132941789476425 },
    "harness|ko_mmlu_virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.03789134424611548, "acc_norm": 0.3855421686746988, "acc_norm_stderr": 0.03789134424611548 },
    "harness|ko_mmlu_philosophy|5": { "acc": 0.48231511254019294, "acc_stderr": 0.02838032284907713, "acc_norm": 0.48231511254019294, "acc_norm_stderr": 0.02838032284907713 },
    "harness|ko_mmlu_human_aging|5": { "acc": 0.47533632286995514, "acc_stderr": 0.03351695167652628, "acc_norm": 0.47533632286995514, "acc_norm_stderr": 0.03351695167652628 },
    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.45038167938931295, "acc_stderr": 0.04363643698524779, "acc_norm": 0.45038167938931295, "acc_norm_stderr": 0.04363643698524779 },
    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 },
    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5555555555555556, "acc_stderr": 0.035402943770953675, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.035402943770953675 },
    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3793103448275862, "acc_stderr": 0.04043461861916748, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.04043461861916748 },
    "harness|ko_mmlu_college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.03793281185307811, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.03793281185307811 },
    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4369747899159664, "acc_stderr": 0.03221943636566196, "acc_norm": 0.4369747899159664, "acc_norm_stderr": 0.03221943636566196 },
    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4205128205128205, "acc_stderr": 0.025028610276710855, "acc_norm": 0.4205128205128205, "acc_norm_stderr": 0.025028610276710855 },
    "harness|ko_mmlu_computer_security|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 },
    "harness|ko_mmlu_global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.04830366024635331 },
    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.033764582465095665, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.033764582465095665 },
    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.42258064516129035, "acc_stderr": 0.02810096472427264, "acc_norm": 0.42258064516129035, "acc_norm_stderr": 0.02810096472427264 },
    "harness|ko_mmlu_marketing|5": { "acc": 0.5982905982905983, "acc_stderr": 0.03211693751051622, "acc_norm": 0.5982905982905983, "acc_norm_stderr": 0.03211693751051622 },
    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4188679245283019, "acc_stderr": 0.030365050829115208, "acc_norm": 0.4188679245283019, "acc_norm_stderr": 0.030365050829115208 },
    "harness|ko_mmlu_public_relations|5": { "acc": 0.4727272727272727, "acc_stderr": 0.04782001791380063, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.04782001791380063 },
    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371 },
    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 },
    "harness|ko_mmlu_sociology|5": { "acc": 0.5024875621890548, "acc_stderr": 0.03535490150137289, "acc_norm": 0.5024875621890548, "acc_norm_stderr": 0.03535490150137289 },
    "harness|ko_mmlu_college_medicine|5": { "acc": 0.4508670520231214, "acc_stderr": 0.037940126746970296, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.037940126746970296 },
    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.291005291005291, "acc_stderr": 0.023393826500484875, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484875 },
    "harness|ko_mmlu_college_biology|5": { "acc": 0.3541666666666667, "acc_stderr": 0.039994111357535424, "acc_norm": 0.3541666666666667, "acc_norm_stderr": 0.039994111357535424 },
    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 },
    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.048783173121456344, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456344 },
    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5, "acc_stderr": 0.026919095102908273, "acc_norm": 0.5, "acc_norm_stderr": 0.026919095102908273 },
    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5153374233128835, "acc_stderr": 0.03926522378708843, "acc_norm": 0.5153374233128835, "acc_norm_stderr": 0.03926522378708843 },
    "harness|ko_mmlu_prehistory|5": { "acc": 0.5, "acc_stderr": 0.02782074420373286, "acc_norm": 0.5, "acc_norm_stderr": 0.02782074420373286 },
    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5181347150259067, "acc_stderr": 0.03606065001832919, "acc_norm": 0.5181347150259067, "acc_norm_stderr": 0.03606065001832919 },
    "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 },
    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5431192660550459, "acc_stderr": 0.021357458785226224, "acc_norm": 0.5431192660550459, "acc_norm_stderr": 0.021357458785226224 },
    "harness|ko_mmlu_formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 },
    "harness|ko_mmlu_nutrition|5": { "acc": 0.3954248366013072, "acc_stderr": 0.02799672318063145, "acc_norm": 0.3954248366013072, "acc_norm_stderr": 0.02799672318063145 },
    "harness|ko_mmlu_business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 },
    "harness|ko_mmlu_international_law|5": { "acc": 0.6198347107438017, "acc_stderr": 0.04431324501968431, "acc_norm": 0.6198347107438017, "acc_norm_stderr": 0.04431324501968431 },
    "harness|ko_mmlu_astronomy|5": { "acc": 0.42105263157894735, "acc_stderr": 0.04017901275981748, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.04017901275981748 },
    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3937908496732026, "acc_stderr": 0.01976621199107307, "acc_norm": 0.3937908496732026, "acc_norm_stderr": 0.01976621199107307 },
    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.02826765748265015, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.02826765748265015 },
    "harness|ko_mmlu_machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.04246624336697624, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697624 },
    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 },
    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.29497206703910617, "acc_stderr": 0.015251931579208185, "acc_norm": 0.29497206703910617, "acc_norm_stderr": 0.015251931579208185 },
    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 },
    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 },
    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3860294117647059, "acc_stderr": 0.029573269134411124, "acc_norm": 0.3860294117647059, "acc_norm_stderr": 0.029573269134411124 },
    "harness|ko_mmlu_security_studies|5": { "acc": 0.46938775510204084, "acc_stderr": 0.031949171367580624, "acc_norm": 0.46938775510204084, "acc_norm_stderr": 0.031949171367580624 },
    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6919831223628692, "acc_stderr": 0.030052389335605695, "acc_norm": 0.6919831223628692, "acc_norm_stderr": 0.030052389335605695 },
    "harness|ko_mmlu_professional_law|5": { "acc": 0.3683181225554107, "acc_stderr": 0.012319403369564639, "acc_norm": 0.3683181225554107, "acc_norm_stderr": 0.012319403369564639 },
    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5245098039215687, "acc_stderr": 0.03505093194348798, "acc_norm": 0.5245098039215687, "acc_norm_stderr": 0.03505093194348798 },
    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5757575757575758, "acc_stderr": 0.03859268142070262, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.03859268142070262 },
    "harness|ko_truthfulqa_mc|0": { "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.458694749783158, "mc2_stderr": 0.015135220490705375 },
    "harness|ko_commongen_v2|2": { "acc": 0.45336481700118064, "acc_stderr": 0.017115418225226862, "acc_norm": 0.564344746162928, "acc_norm_stderr": 0.017047415229476313 }
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "42MARU/GenAI-llama2-ko-en-instruct-20B-v1",
    "model_sha": "4de05113ecc02aa2da28893d8e2827912ebe0d20",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3848122866894198, "acc_stderr": 0.014218371065251095, "acc_norm": 0.4402730375426621, "acc_norm_stderr": 0.014506769524804243},
        "harness|ko_hellaswag|10": {"acc": 0.4190400318661621, "acc_stderr": 0.0049239357498424945, "acc_norm": 0.5560645289782912, "acc_norm_stderr": 0.004958314114266494},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.5614035087719298, "acc_stderr": 0.038057975055904594, "acc_norm": 0.5614035087719298, "acc_norm_stderr": 0.038057975055904594},
        "harness|ko_mmlu_management|5": {"acc": 0.6019417475728155, "acc_stderr": 0.04846748253977238, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.04846748253977238},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5287356321839081, "acc_stderr": 0.017850410794380173, "acc_norm": 0.5287356321839081, "acc_norm_stderr": 0.017850410794380173},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.4222222222222222, "acc_stderr": 0.04266763404099582, "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.04266763404099582},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.3404255319148936, "acc_stderr": 0.030976692998534443, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.030976692998534443},
        "harness|ko_mmlu_virology|5": {"acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.5273311897106109, "acc_stderr": 0.028355633568328188, "acc_norm": 0.5273311897106109, "acc_norm_stderr": 0.028355633568328188},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.4798206278026906, "acc_stderr": 0.033530461674123, "acc_norm": 0.4798206278026906, "acc_norm_stderr": 0.033530461674123},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.5114503816793893, "acc_norm_stderr": 0.043841400240780176},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5353535353535354, "acc_stderr": 0.03553436368828061, "acc_norm": 0.5353535353535354, "acc_norm_stderr": 0.03553436368828061},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4827586206896552, "acc_stderr": 0.04164188720169377, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.04164188720169377},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.23529411764705882, "acc_stderr": 0.04220773659171452, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171452},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.49159663865546216, "acc_stderr": 0.03247390276569669, "acc_norm": 0.49159663865546216, "acc_norm_stderr": 0.03247390276569669},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.46153846153846156, "acc_stderr": 0.025275892070240634, "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.025275892070240634},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5277777777777778, "acc_stderr": 0.04826217294139894, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.04826217294139894},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.3399014778325123, "acc_stderr": 0.033327690684107895, "acc_norm": 0.3399014778325123, "acc_norm_stderr": 0.033327690684107895},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.4806451612903226, "acc_stderr": 0.0284226874043121, "acc_norm": 0.4806451612903226, "acc_norm_stderr": 0.0284226874043121},
        "harness|ko_mmlu_marketing|5": {"acc": 0.6752136752136753, "acc_stderr": 0.03067902276549883, "acc_norm": 0.6752136752136753, "acc_norm_stderr": 0.03067902276549883},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.47547169811320755, "acc_stderr": 0.030735822206205615, "acc_norm": 0.47547169811320755, "acc_norm_stderr": 0.030735822206205615},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.5454545454545454, "acc_stderr": 0.04769300568972745, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.04769300568972745},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.2777777777777778, "acc_stderr": 0.027309140588230172, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.027309140588230172},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696},
        "harness|ko_mmlu_sociology|5": {"acc": 0.5671641791044776, "acc_stderr": 0.03503490923673282, "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673282},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.4393063583815029, "acc_stderr": 0.037842719328874674, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.037842719328874674},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.3333333333333333, "acc_stderr": 0.0242785680243077, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0242785680243077},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.4027777777777778, "acc_stderr": 0.04101405519842425, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.04101405519842425},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.49421965317919075, "acc_stderr": 0.026917296179149116, "acc_norm": 0.49421965317919075, "acc_norm_stderr": 0.026917296179149116},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.48466257668711654, "acc_stderr": 0.039265223787088445, "acc_norm": 0.48466257668711654, "acc_norm_stderr": 0.039265223787088445},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.49074074074074076, "acc_stderr": 0.027815973433878014, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.027815973433878014},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.3, "acc_stderr": 0.04605661864718381, "acc_norm": 0.3, "acc_norm_stderr": 0.04605661864718381},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.5440414507772021, "acc_stderr": 0.03594413711272436, "acc_norm": 0.5440414507772021, "acc_norm_stderr": 0.03594413711272436},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5559633027522936, "acc_stderr": 0.021302621211654518, "acc_norm": 0.5559633027522936, "acc_norm_stderr": 0.021302621211654518},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.4673202614379085, "acc_stderr": 0.028568699752225875, "acc_norm": 0.4673202614379085, "acc_norm_stderr": 0.028568699752225875},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912},
        "harness|ko_mmlu_international_law|5": {"acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.47368421052631576, "acc_stderr": 0.04063302731486671, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.04063302731486671},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3741830065359477, "acc_stderr": 0.019576953122088833, "acc_norm": 0.3741830065359477, "acc_norm_stderr": 0.019576953122088833},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.31560283687943264, "acc_stderr": 0.027724989449509314, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.027724989449509314},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.22321428571428573, "acc_stderr": 0.039523019677025116, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.039523019677025116},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.4074074074074074, "acc_stderr": 0.033509916046960436, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.033509916046960436},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.264804469273743, "acc_stderr": 0.014756906483260664, "acc_norm": 0.264804469273743, "acc_norm_stderr": 0.014756906483260664},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.39705882352941174, "acc_stderr": 0.029722152099280058, "acc_norm": 0.39705882352941174, "acc_norm_stderr": 0.029722152099280058},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.49387755102040815, "acc_stderr": 0.032006820201639086, "acc_norm": 0.49387755102040815, "acc_norm_stderr": 0.032006820201639086},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5738396624472574, "acc_stderr": 0.03219035703131774, "acc_norm": 0.5738396624472574, "acc_norm_stderr": 0.03219035703131774},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.35071707953063885, "acc_stderr": 0.012187773370741518, "acc_norm": 0.35071707953063885, "acc_norm_stderr": 0.012187773370741518},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.4950980392156863, "acc_stderr": 0.03509143375606786, "acc_norm": 0.4950980392156863, "acc_norm_stderr": 0.03509143375606786},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5696969696969697, "acc_stderr": 0.03866225962879077, "acc_norm": 0.5696969696969697, "acc_norm_stderr": 0.03866225962879077},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775523, "mc2": 0.44227632802507094, "mc2_stderr": 0.015242459306682204},
        "harness|ko_commongen_v2|2": {"acc": 0.5088547815820543, "acc_stderr": 0.017187658199336743, "acc_norm": 0.5608028335301063, "acc_norm_stderr": 0.017062775744780705}
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v2-13b",
        "model_sha": "9f429309fc6b939d08c659ab4666f6e80324dcd1",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
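Every result file in this commit shares the same layout: a "results" object keyed by harness task (the 5-shot ko_mmlu subtasks report acc/acc_norm, while ko_truthfulqa_mc reports mc1/mc2), a "versions" object giving the prompt-template version per task, and a "config_general" block identifying the evaluated checkpoint. The following is a minimal Python sketch, not part of this repository, showing how one such file could be aggregated into a single score; the file path is a placeholder, and the equal-weight average is only an illustration of the schema, not the leaderboard's official scoring.

import json
from statistics import mean

# Placeholder path: point this at any of the result_*.json files added here.
PATH = "result.json"

with open(PATH, encoding="utf-8") as f:
    report = json.load(f)

scores = []
for task, metrics in report["results"].items():
    if "acc_norm" in metrics:
        # ARC / HellaSwag / MMLU / CommonGen-style multiple-choice tasks.
        scores.append(metrics["acc_norm"])
    elif "mc2" in metrics:
        # ko_truthfulqa_mc exposes mc1/mc2 instead of acc/acc_norm.
        scores.append(metrics["mc2"])

name = report["config_general"]["model_name"]
print(f"{name}: macro-average over {len(scores)} tasks = {mean(scores):.4f}")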
42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-02 01:15:35.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3967576791808874, "acc_stderr": 0.014296513020180646, "acc_norm": 0.454778156996587, "acc_norm_stderr": 0.014551507060836355},
        "harness|ko_hellaswag|10": {"acc": 0.4164509061939853, "acc_stderr": 0.004919626380645517, "acc_norm": 0.5536745668193587, "acc_norm_stderr": 0.004960947388535101},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.5380116959064327, "acc_stderr": 0.038237270928823064, "acc_norm": 0.5380116959064327, "acc_norm_stderr": 0.038237270928823064},
        "harness|ko_mmlu_management|5": {"acc": 0.6019417475728155, "acc_stderr": 0.048467482539772386, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.048467482539772386},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5159642401021711, "acc_stderr": 0.017870847506081738, "acc_norm": 0.5159642401021711, "acc_norm_stderr": 0.017870847506081738},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.42962962962962964, "acc_stderr": 0.04276349494376599, "acc_norm": 0.42962962962962964, "acc_norm_stderr": 0.04276349494376599},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.37446808510638296, "acc_stderr": 0.031639106653672915, "acc_norm": 0.37446808510638296, "acc_norm_stderr": 0.031639106653672915},
        "harness|ko_mmlu_virology|5": {"acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.5627009646302251, "acc_stderr": 0.0281739177617629, "acc_norm": 0.5627009646302251, "acc_norm_stderr": 0.0281739177617629},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.3811659192825112, "acc_stderr": 0.032596251184168284, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.032596251184168284},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.46564885496183206, "acc_stderr": 0.043749285605997376, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.043749285605997376},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5757575757575758, "acc_stderr": 0.035212249088415866, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.035212249088415866},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.18627450980392157, "acc_stderr": 0.038739587141493524, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.038739587141493524},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.4495798319327731, "acc_stderr": 0.03231293497137707, "acc_norm": 0.4495798319327731, "acc_norm_stderr": 0.03231293497137707},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4641025641025641, "acc_stderr": 0.025285585990017834, "acc_norm": 0.4641025641025641, "acc_norm_stderr": 0.025285585990017834},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5277777777777778, "acc_stderr": 0.04826217294139894, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.04826217294139894},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.35467980295566504, "acc_stderr": 0.03366124489051449, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.03366124489051449},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5096774193548387, "acc_stderr": 0.02843867799890955, "acc_norm": 0.5096774193548387, "acc_norm_stderr": 0.02843867799890955},
        "harness|ko_mmlu_marketing|5": {"acc": 0.6837606837606838, "acc_stderr": 0.030463656747340254, "acc_norm": 0.6837606837606838, "acc_norm_stderr": 0.030463656747340254},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4641509433962264, "acc_stderr": 0.030693675018458003, "acc_norm": 0.4641509433962264, "acc_norm_stderr": 0.030693675018458003},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.44545454545454544, "acc_stderr": 0.047605488214603246, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.047605488214603246},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.2777777777777778, "acc_stderr": 0.02730914058823017, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02730914058823017},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343},
        "harness|ko_mmlu_sociology|5": {"acc": 0.5920398009950248, "acc_stderr": 0.03475116365194092, "acc_norm": 0.5920398009950248, "acc_norm_stderr": 0.03475116365194092},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.3988439306358382, "acc_stderr": 0.03733626655383509, "acc_norm": 0.3988439306358382, "acc_norm_stderr": 0.03733626655383509},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2857142857142857, "acc_stderr": 0.02326651221373056, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.02326651221373056},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.4027777777777778, "acc_stderr": 0.04101405519842424, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.04101405519842424},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5028901734104047, "acc_stderr": 0.02691864538323901, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.02691864538323901},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.4723926380368098, "acc_stderr": 0.03922378290610991, "acc_norm": 0.4723926380368098, "acc_norm_stderr": 0.03922378290610991},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.4783950617283951, "acc_stderr": 0.02779476010500874, "acc_norm": 0.4783950617283951, "acc_norm_stderr": 0.02779476010500874},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.28, "acc_stderr": 0.045126085985421296, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421296},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.538860103626943, "acc_stderr": 0.035975244117345775, "acc_norm": 0.538860103626943, "acc_norm_stderr": 0.035975244117345775},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.2807017543859649, "acc_stderr": 0.042270544512321984, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512321984},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5229357798165137, "acc_stderr": 0.0214147570581755, "acc_norm": 0.5229357798165137, "acc_norm_stderr": 0.0214147570581755},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.3412698412698413, "acc_stderr": 0.04240799327574925, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574925},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.477124183006536, "acc_stderr": 0.028599936776089775, "acc_norm": 0.477124183006536, "acc_norm_stderr": 0.028599936776089775},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05},
        "harness|ko_mmlu_international_law|5": {"acc": 0.6776859504132231, "acc_stderr": 0.04266416363352167, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.04266416363352167},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.46710526315789475, "acc_stderr": 0.040601270352363966, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.040601270352363966},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3660130718954248, "acc_stderr": 0.019488025745529672, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.019488025745529672},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.35106382978723405, "acc_stderr": 0.028473501272963775, "acc_norm": 0.35106382978723405, "acc_norm_stderr": 0.028473501272963775},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2547486033519553, "acc_stderr": 0.014572650383409167, "acc_norm": 0.2547486033519553, "acc_norm_stderr": 0.014572650383409167},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.45955882352941174, "acc_stderr": 0.03027332507734575, "acc_norm": 0.45955882352941174, "acc_norm_stderr": 0.03027332507734575},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.4897959183673469, "acc_stderr": 0.03200255347893782, "acc_norm": 0.4897959183673469, "acc_norm_stderr": 0.03200255347893782},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6033755274261603, "acc_stderr": 0.03184399873811225, "acc_norm": 0.6033755274261603, "acc_norm_stderr": 0.03184399873811225},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.3285528031290743, "acc_stderr": 0.011996027247502912, "acc_norm": 0.3285528031290743, "acc_norm_stderr": 0.011996027247502912},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.49019607843137253, "acc_stderr": 0.035086373586305716, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.035086373586305716},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5575757575757576, "acc_stderr": 0.03878372113711275, "acc_norm": 0.5575757575757576, "acc_norm_stderr": 0.03878372113711275},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.27906976744186046, "mc1_stderr": 0.01570210709062789, "mc2": 0.44866578973581106, "mc2_stderr": 0.015416926437342405},
        "harness|ko_commongen_v2|2": {"acc": 0.48406139315230223, "acc_stderr": 0.017181617837190192, "acc_norm": 0.5619834710743802, "acc_norm_stderr": 0.01705775370216029}
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v3-13B",
        "model_sha": "199c2113f09f153bce1ad7aac35e6e756a99b89b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
42MARU/GenAI-llama2-ko-en-instruct-v3-13b/result_2023-11-01 18:54:40.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3967576791808874, "acc_stderr": 0.014296513020180646, "acc_norm": 0.454778156996587, "acc_norm_stderr": 0.014551507060836355},
        "harness|ko_hellaswag|10": {"acc": 0.4164509061939853, "acc_stderr": 0.004919626380645517, "acc_norm": 0.5536745668193587, "acc_norm_stderr": 0.004960947388535101},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.5380116959064327, "acc_stderr": 0.038237270928823064, "acc_norm": 0.5380116959064327, "acc_norm_stderr": 0.038237270928823064},
        "harness|ko_mmlu_management|5": {"acc": 0.6019417475728155, "acc_stderr": 0.048467482539772386, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.048467482539772386},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5159642401021711, "acc_stderr": 0.017870847506081738, "acc_norm": 0.5159642401021711, "acc_norm_stderr": 0.017870847506081738},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.42962962962962964, "acc_stderr": 0.04276349494376599, "acc_norm": 0.42962962962962964, "acc_norm_stderr": 0.04276349494376599},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.37446808510638296, "acc_stderr": 0.031639106653672915, "acc_norm": 0.37446808510638296, "acc_norm_stderr": 0.031639106653672915},
        "harness|ko_mmlu_virology|5": {"acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.5627009646302251, "acc_stderr": 0.0281739177617629, "acc_norm": 0.5627009646302251, "acc_norm_stderr": 0.0281739177617629},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.3811659192825112, "acc_stderr": 0.032596251184168284, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.032596251184168284},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.46564885496183206, "acc_stderr": 0.043749285605997376, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.043749285605997376},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5757575757575758, "acc_stderr": 0.035212249088415866, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.035212249088415866},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.18627450980392157, "acc_stderr": 0.038739587141493524, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.038739587141493524},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.4495798319327731, "acc_stderr": 0.03231293497137707, "acc_norm": 0.4495798319327731, "acc_norm_stderr": 0.03231293497137707},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4641025641025641, "acc_stderr": 0.025285585990017834, "acc_norm": 0.4641025641025641, "acc_norm_stderr": 0.025285585990017834},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5277777777777778, "acc_stderr": 0.04826217294139894, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.04826217294139894},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.35467980295566504, "acc_stderr": 0.03366124489051449, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.03366124489051449},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5096774193548387, "acc_stderr": 0.02843867799890955, "acc_norm": 0.5096774193548387, "acc_norm_stderr": 0.02843867799890955},
        "harness|ko_mmlu_marketing|5": {"acc": 0.6837606837606838, "acc_stderr": 0.030463656747340254, "acc_norm": 0.6837606837606838, "acc_norm_stderr": 0.030463656747340254},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4641509433962264, "acc_stderr": 0.030693675018458003, "acc_norm": 0.4641509433962264, "acc_norm_stderr": 0.030693675018458003},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.44545454545454544, "acc_stderr": 0.047605488214603246, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.047605488214603246},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.2777777777777778, "acc_stderr": 0.02730914058823017, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02730914058823017},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343},
        "harness|ko_mmlu_sociology|5": {"acc": 0.5920398009950248, "acc_stderr": 0.03475116365194092, "acc_norm": 0.5920398009950248, "acc_norm_stderr": 0.03475116365194092},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.3988439306358382, "acc_stderr": 0.03733626655383509, "acc_norm": 0.3988439306358382, "acc_norm_stderr": 0.03733626655383509},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2857142857142857, "acc_stderr": 0.02326651221373056, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.02326651221373056},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.4027777777777778, "acc_stderr": 0.04101405519842424, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.04101405519842424},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5028901734104047, "acc_stderr": 0.02691864538323901, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.02691864538323901},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.4723926380368098, "acc_stderr": 0.03922378290610991, "acc_norm": 0.4723926380368098, "acc_norm_stderr": 0.03922378290610991},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.4783950617283951, "acc_stderr": 0.02779476010500874, "acc_norm": 0.4783950617283951, "acc_norm_stderr": 0.02779476010500874},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.28, "acc_stderr": 0.045126085985421296, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421296},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.538860103626943, "acc_stderr": 0.035975244117345775, "acc_norm": 0.538860103626943, "acc_norm_stderr": 0.035975244117345775},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.2807017543859649, "acc_stderr": 0.042270544512321984, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512321984},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5229357798165137, "acc_stderr": 0.0214147570581755, "acc_norm": 0.5229357798165137, "acc_norm_stderr": 0.0214147570581755},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.3412698412698413, "acc_stderr": 0.04240799327574925, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574925},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.477124183006536, "acc_stderr": 0.028599936776089775, "acc_norm": 0.477124183006536, "acc_norm_stderr": 0.028599936776089775},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05},
        "harness|ko_mmlu_international_law|5": {"acc": 0.6776859504132231, "acc_stderr": 0.04266416363352167, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.04266416363352167},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.46710526315789475, "acc_stderr": 0.040601270352363966, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.040601270352363966},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3660130718954248, "acc_stderr": 0.019488025745529672, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.019488025745529672},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.35106382978723405, "acc_stderr": 0.028473501272963775, "acc_norm": 0.35106382978723405, "acc_norm_stderr": 0.028473501272963775},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2547486033519553, "acc_stderr": 0.014572650383409167, "acc_norm": 0.2547486033519553, "acc_norm_stderr": 0.014572650383409167},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.45955882352941174, "acc_stderr": 0.03027332507734575, "acc_norm": 0.45955882352941174, "acc_norm_stderr": 0.03027332507734575},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.4897959183673469, "acc_stderr": 0.03200255347893782, "acc_norm": 0.4897959183673469, "acc_norm_stderr": 0.03200255347893782},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6033755274261603, "acc_stderr": 0.03184399873811225, "acc_norm": 0.6033755274261603, "acc_norm_stderr": 0.03184399873811225},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.3285528031290743, "acc_stderr": 0.011996027247502912, "acc_norm": 0.3285528031290743, "acc_norm_stderr": 0.011996027247502912},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.49019607843137253, "acc_stderr": 0.035086373586305716, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.035086373586305716},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5575757575757576, "acc_stderr": 0.03878372113711275, "acc_norm": 0.5575757575757576, "acc_norm_stderr": 0.03878372113711275},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.27906976744186046, "mc1_stderr": 0.01570210709062789, "mc2": 0.4486611820923937, "mc2_stderr": 0.015416976946375454},
        "harness|ko_commongen_v2|2": {"acc": 0.48406139315230223, "acc_stderr": 0.017181617837190192, "acc_norm": 0.5619834710743802, "acc_norm_stderr": 0.01705775370216029}
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v3-13b",
        "model_sha": "199c2113f09f153bce1ad7aac35e6e756a99b89b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
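Note that the two files above (GenAI-llama2-ko-en-instruct-v3-13B and -v3-13b) carry the same model_sha and agree on every task score except the mc2/mc2_stderr values of harness|ko_truthfulqa_mc|0, so they are effectively duplicate runs of one checkpoint. A small Python sketch, with placeholder file names rather than paths from this commit, for spotting such near-duplicates by diffing two result files:

import json

def load_results(path):
    with open(path, encoding="utf-8") as f:
        return json.load(f)["results"]

def diff_results(a, b):
    # Yield every (task, metric) pair whose value differs between two runs.
    for task in sorted(set(a) | set(b)):
        ma, mb = a.get(task, {}), b.get(task, {})
        for metric in sorted(set(ma) | set(mb)):
            if ma.get(metric) != mb.get(metric):
                yield task, metric, ma.get(metric), mb.get(metric)

# Placeholder file names for the two runs being compared.
left = load_results("result_v3_13B.json")
right = load_results("result_v3_13b.json")
for task, metric, va, vb in diff_results(left, right):
    print(f"{task} / {metric}: {va} != {vb}")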
42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.4087030716723549, "acc_stderr": 0.014365750345427006, "acc_norm": 0.4564846416382253, "acc_norm_stderr": 0.01455594976049644},
        "harness|ko_hellaswag|10": {"acc": 0.43168691495717987, "acc_stderr": 0.0049429906231311166, "acc_norm": 0.5795658235411273, "acc_norm_stderr": 0.0049261984839487115},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.5321637426900585, "acc_stderr": 0.038268824176603704, "acc_norm": 0.5321637426900585, "acc_norm_stderr": 0.038268824176603704},
        "harness|ko_mmlu_management|5": {"acc": 0.49514563106796117, "acc_stderr": 0.049505043821289195, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.049505043821289195},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5555555555555556, "acc_stderr": 0.017769250583533246, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.017769250583533246},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.4074074074074074, "acc_stderr": 0.0424463323835323, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.0424463323835323},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.4, "acc_stderr": 0.03202563076101735, "acc_norm": 0.4, "acc_norm_stderr": 0.03202563076101735},
        "harness|ko_mmlu_virology|5": {"acc": 0.4457831325301205, "acc_stderr": 0.038695433234721015, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.038695433234721015},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.4855305466237942, "acc_stderr": 0.028386198084177673, "acc_norm": 0.4855305466237942, "acc_norm_stderr": 0.028386198084177673},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.5022421524663677, "acc_stderr": 0.033557465352232634, "acc_norm": 0.5022421524663677, "acc_norm_stderr": 0.033557465352232634},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5909090909090909, "acc_stderr": 0.035029757994130085, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.035029757994130085},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.3724137931034483, "acc_stderr": 0.0402873153294756, "acc_norm": 0.3724137931034483, "acc_norm_stderr": 0.0402873153294756},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.22549019607843138, "acc_stderr": 0.04158307533083286, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.04158307533083286},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.453781512605042, "acc_stderr": 0.03233943468182088, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.03233943468182088},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.45897435897435895, "acc_stderr": 0.025265525491284295, "acc_norm": 0.45897435897435895, "acc_norm_stderr": 0.025265525491284295},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5462962962962963, "acc_stderr": 0.048129173245368216, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.048129173245368216},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.35467980295566504, "acc_stderr": 0.03366124489051449, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.03366124489051449},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.45806451612903226, "acc_stderr": 0.028343787250540636, "acc_norm": 0.45806451612903226, "acc_norm_stderr": 0.028343787250540636},
        "harness|ko_mmlu_marketing|5": {"acc": 0.6495726495726496, "acc_stderr": 0.0312561082442188, "acc_norm": 0.6495726495726496, "acc_norm_stderr": 0.0312561082442188},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4490566037735849, "acc_stderr": 0.030612730713641092, "acc_norm": 0.4490566037735849, "acc_norm_stderr": 0.030612730713641092},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.2518518518518518, "acc_stderr": 0.026466117538959916, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.026466117538959916},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733},
        "harness|ko_mmlu_sociology|5": {"acc": 0.5671641791044776, "acc_stderr": 0.03503490923673281, "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673281},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.4161849710982659, "acc_stderr": 0.037585177754049466, "acc_norm": 0.4161849710982659, "acc_norm_stderr": 0.037585177754049466},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.30423280423280424, "acc_stderr": 0.023695415009463087, "acc_norm": 0.30423280423280424, "acc_norm_stderr": 0.023695415009463087},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.3958333333333333, "acc_stderr": 0.04089465449325583, "acc_norm": 0.3958333333333333, "acc_norm_stderr": 0.04089465449325583},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.49710982658959535, "acc_stderr": 0.02691864538323901, "acc_norm": 0.49710982658959535, "acc_norm_stderr": 0.02691864538323901},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.5766871165644172, "acc_stderr": 0.03881891213334383, "acc_norm": 0.5766871165644172, "acc_norm_stderr": 0.03881891213334383},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.49691358024691357, "acc_stderr": 0.027820214158594377, "acc_norm": 0.49691358024691357, "acc_norm_stderr": 0.027820214158594377},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.5440414507772021, "acc_stderr": 0.03594413711272436, "acc_norm": 0.5440414507772021, "acc_norm_stderr": 0.03594413711272436},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.21052631578947367, "acc_stderr": 0.03835153954399419, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.03835153954399419},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5871559633027523, "acc_stderr": 0.02110912813341391, "acc_norm": 0.5871559633027523, "acc_norm_stderr": 0.02110912813341391},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.4215686274509804, "acc_stderr": 0.02827549015679143, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.02827549015679143},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919},
        "harness|ko_mmlu_international_law|5": {"acc": 0.5950413223140496, "acc_stderr": 0.04481137755942469, "acc_norm": 0.5950413223140496, "acc_norm_stderr": 0.04481137755942469},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.375, "acc_stderr": 0.039397364351956274, "acc_norm": 0.375, "acc_norm_stderr": 0.039397364351956274},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.42810457516339867, "acc_stderr": 0.0200176292142131, "acc_norm": 0.42810457516339867, "acc_norm_stderr": 0.0200176292142131},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3404255319148936, "acc_stderr": 0.028267657482650144, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.028267657482650144},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.2767857142857143, "acc_stderr": 0.04246624336697625, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697625},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295, "acc_norm": 0.33796296296296297, "acc_norm_stderr": 0.03225941352631295},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2435754189944134, "acc_stderr": 0.01435591196476786, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.01435591196476786},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.38235294117647056, "acc_stderr": 0.02952009569768775, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.02952009569768775},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.5020408163265306, "acc_stderr": 0.0320089533497105, "acc_norm": 0.5020408163265306, "acc_norm_stderr": 0.0320089533497105},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6751054852320675, "acc_stderr": 0.030486039389105303, "acc_norm": 0.6751054852320675, "acc_norm_stderr": 0.030486039389105303},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.3741851368970013, "acc_stderr": 0.012359335618172063, "acc_norm": 0.3741851368970013, "acc_norm_stderr": 0.012359335618172063},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.5294117647058824, "acc_stderr": 0.03503235296367993, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03503235296367993},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5575757575757576, "acc_stderr": 0.03878372113711275, "acc_norm": 0.5575757575757576, "acc_norm_stderr": 0.03878372113711275},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.3243574051407589, "mc1_stderr": 0.01638797677964794, "mc2": 0.4753344144954286, "mc2_stderr": 0.015470233894001158},
        "harness|ko_commongen_v2|2": {"acc": 0.500590318772137, "acc_stderr": 0.01719034212344859, "acc_norm": 0.5726092089728453, "acc_norm_stderr": 0.017008129844823156}
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v4-13B",
        "model_sha": "fabf605d23d96e548908ffe9f0ad49dae01c46f8",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
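The `results` block above is flat: one entry per harness task, each carrying paired `acc`/`acc_norm` scores with their standard errors. As a sanity check on a reconstructed file, a small sketch (the path is hypothetical, and the plain mean below is just a macro-average for illustration, not necessarily the leaderboard's official aggregation):

```python
import json
from statistics import mean

# Hypothetical path to the file shown above.
PATH = "42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json"

with open(PATH) as f:
    results = json.load(f)["results"]

# Every ko_mmlu subtask reports acc/acc_norm plus standard errors.
mmlu = [v["acc_norm"] for k, v in results.items() if k.startswith("harness|ko_mmlu_")]
print(f"ko_mmlu subtasks: {len(mmlu)}")
print(f"macro-averaged acc_norm: {mean(mmlu):.4f}")
```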
42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3822525597269625, "acc_stderr": 0.014200454049979272, "acc_norm": 0.43600682593856654, "acc_norm_stderr": 0.014491225699230918},
        "harness|ko_hellaswag|10": {"acc": 0.4221270663214499, "acc_stderr": 0.00492889189587429, "acc_norm": 0.5567616012746465, "acc_norm_stderr": 0.004957524197900418},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.4619883040935672, "acc_stderr": 0.03823727092882307, "acc_norm": 0.4619883040935672, "acc_norm_stderr": 0.03823727092882307},
        "harness|ko_mmlu_management|5": {"acc": 0.49514563106796117, "acc_stderr": 0.04950504382128921, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.04950504382128921},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5274584929757343, "acc_stderr": 0.017852981266633944, "acc_norm": 0.5274584929757343, "acc_norm_stderr": 0.017852981266633944},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.4740740740740741, "acc_stderr": 0.04313531696750573, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750573},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684},
        "harness|ko_mmlu_virology|5": {"acc": 0.39156626506024095, "acc_stderr": 0.03799857454479637, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479637},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.47266881028938906, "acc_stderr": 0.028355633568328174, "acc_norm": 0.47266881028938906, "acc_norm_stderr": 0.028355633568328174},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.47533632286995514, "acc_stderr": 0.03351695167652628, "acc_norm": 0.47533632286995514, "acc_norm_stderr": 0.03351695167652628},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4580152671755725, "acc_stderr": 0.04369802690578756, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578756},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.51010101010101, "acc_stderr": 0.035616254886737454, "acc_norm": 0.51010101010101, "acc_norm_stderr": 0.035616254886737454},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4896551724137931, "acc_stderr": 0.04165774775728763, "acc_norm": 0.4896551724137931, "acc_norm_stderr": 0.04165774775728763},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.40756302521008403, "acc_stderr": 0.031918633744784645, "acc_norm": 0.40756302521008403, "acc_norm_stderr": 0.031918633744784645},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.39487179487179486, "acc_stderr": 0.024784316942156367, "acc_norm": 0.39487179487179486, "acc_norm_stderr": 0.024784316942156367},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5277777777777778, "acc_stderr": 0.04826217294139894, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.04826217294139894},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.39408866995073893, "acc_stderr": 0.03438157967036545, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.03438157967036545},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.44193548387096776, "acc_stderr": 0.028251557906849734, "acc_norm": 0.44193548387096776, "acc_norm_stderr": 0.028251557906849734},
        "harness|ko_mmlu_marketing|5": {"acc": 0.6324786324786325, "acc_stderr": 0.031585391577456365, "acc_norm": 0.6324786324786325, "acc_norm_stderr": 0.031585391577456365},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4188679245283019, "acc_stderr": 0.030365050829115208, "acc_norm": 0.4188679245283019, "acc_norm_stderr": 0.030365050829115208},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.4090909090909091, "acc_stderr": 0.04709306978661896, "acc_norm": 0.4090909090909091, "acc_norm_stderr": 0.04709306978661896},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2913907284768212, "acc_stderr": 0.03710185726119995, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.03710185726119995},
        "harness|ko_mmlu_sociology|5": {"acc": 0.5522388059701493, "acc_stderr": 0.03516184772952167, "acc_norm": 0.5522388059701493, "acc_norm_stderr": 0.03516184772952167},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.3815028901734104, "acc_stderr": 0.03703851193099521, "acc_norm": 0.3815028901734104, "acc_norm_stderr": 0.03703851193099521},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2804232804232804, "acc_stderr": 0.02313528797432563, "acc_norm": 0.2804232804232804, "acc_norm_stderr": 0.02313528797432563},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5115606936416185, "acc_stderr": 0.026911898686377906, "acc_norm": 0.5115606936416185, "acc_norm_stderr": 0.026911898686377906},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.44785276073619634, "acc_stderr": 0.03906947479456601, "acc_norm": 0.44785276073619634, "acc_norm_stderr": 0.03906947479456601},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.43209876543209874, "acc_stderr": 0.02756301097160667, "acc_norm": 0.43209876543209874, "acc_norm_stderr": 0.02756301097160667},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.538860103626943, "acc_stderr": 0.035975244117345775, "acc_norm": 0.538860103626943, "acc_norm_stderr": 0.035975244117345775},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.2982456140350877, "acc_stderr": 0.04303684033537316, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537316},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5009174311926605, "acc_stderr": 0.021437287056051215, "acc_norm": 0.5009174311926605, "acc_norm_stderr": 0.021437287056051215},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.2619047619047619, "acc_stderr": 0.0393253768039287, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.0393253768039287},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.434640522875817, "acc_stderr": 0.028384256704883037, "acc_norm": 0.434640522875817, "acc_norm_stderr": 0.028384256704883037},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
        "harness|ko_mmlu_international_law|5": {"acc": 0.5950413223140496, "acc_stderr": 0.04481137755942469, "acc_norm": 0.5950413223140496, "acc_norm_stderr": 0.04481137755942469},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.3684210526315789, "acc_stderr": 0.03925523381052932, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.03925523381052932},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.380718954248366, "acc_stderr": 0.019643801557924806, "acc_norm": 0.380718954248366, "acc_norm_stderr": 0.019643801557924806},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.31560283687943264, "acc_stderr": 0.027724989449509314, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.027724989449509314},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.2767857142857143, "acc_stderr": 0.042466243366976235, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.042466243366976235},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.26851851851851855, "acc_stderr": 0.030225226160012386, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.030225226160012386},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2446927374301676, "acc_stderr": 0.014378169884098424, "acc_norm": 0.2446927374301676, "acc_norm_stderr": 0.014378169884098424},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.3382352941176471, "acc_stderr": 0.028739328513983576, "acc_norm": 0.3382352941176471, "acc_norm_stderr": 0.028739328513983576},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.40816326530612246, "acc_stderr": 0.03146465712827424, "acc_norm": 0.40816326530612246, "acc_norm_stderr": 0.03146465712827424},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.569620253164557, "acc_stderr": 0.03223017195937598, "acc_norm": 0.569620253164557, "acc_norm_stderr": 0.03223017195937598},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.33116036505867014, "acc_stderr": 0.01202012819598575, "acc_norm": 0.33116036505867014, "acc_norm_stderr": 0.01202012819598575},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.45098039215686275, "acc_stderr": 0.03492406104163614, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.03492406104163614},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5636363636363636, "acc_stderr": 0.03872592983524754, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.03872592983524754},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.4591418911312825, "mc2_stderr": 0.015363002653584545},
        "harness|ko_commongen_v2|2": {"acc": 0.4911452184179457, "acc_stderr": 0.017187658199336736, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.017119172208061504}
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v5-13B",
        "model_sha": "e625b2673e2a0839e7d3fc0f2a844e9966404678",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3916382252559727, "acc_stderr": 0.014264122124938215, "acc_norm": 0.45051194539249145, "acc_norm_stderr": 0.014539646098471627},
        "harness|ko_hellaswag|10": {"acc": 0.4272057359091814, "acc_stderr": 0.004936616428922639, "acc_norm": 0.5610436168094005, "acc_norm_stderr": 0.004952454721934797},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.4678362573099415, "acc_stderr": 0.03826882417660369, "acc_norm": 0.4678362573099415, "acc_norm_stderr": 0.03826882417660369},
        "harness|ko_mmlu_management|5": {"acc": 0.4854368932038835, "acc_stderr": 0.04948637324026637, "acc_norm": 0.4854368932038835, "acc_norm_stderr": 0.04948637324026637},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5210727969348659, "acc_stderr": 0.017864076786212903, "acc_norm": 0.5210727969348659, "acc_norm_stderr": 0.017864076786212903},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.4148148148148148, "acc_stderr": 0.04256193767901407, "acc_norm": 0.4148148148148148, "acc_norm_stderr": 0.04256193767901407},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.39574468085106385, "acc_stderr": 0.03196758697835362, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.03196758697835362},
        "harness|ko_mmlu_virology|5": {"acc": 0.42771084337349397, "acc_stderr": 0.03851597683718533, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.03851597683718533},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.4565916398713826, "acc_stderr": 0.028290869054197604, "acc_norm": 0.4565916398713826, "acc_norm_stderr": 0.028290869054197604},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.47533632286995514, "acc_stderr": 0.03351695167652629, "acc_norm": 0.47533632286995514, "acc_norm_stderr": 0.03351695167652629},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4580152671755725, "acc_stderr": 0.04369802690578756, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578756},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.37, "acc_stderr": 0.04852365870939098, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939098},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5202020202020202, "acc_stderr": 0.03559443565563919, "acc_norm": 0.5202020202020202, "acc_norm_stderr": 0.03559443565563919},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4068965517241379, "acc_stderr": 0.04093793981266237, "acc_norm": 0.4068965517241379, "acc_norm_stderr": 0.04093793981266237},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.40756302521008403, "acc_stderr": 0.031918633744784645, "acc_norm": 0.40756302521008403, "acc_norm_stderr": 0.031918633744784645},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4230769230769231, "acc_stderr": 0.02504919787604233, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.02504919787604233},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.04830366024635331},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.39408866995073893, "acc_stderr": 0.034381579670365446, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.034381579670365446},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.45483870967741935, "acc_stderr": 0.028327743091561056, "acc_norm": 0.45483870967741935, "acc_norm_stderr": 0.028327743091561056},
        "harness|ko_mmlu_marketing|5": {"acc": 0.6410256410256411, "acc_stderr": 0.03142616993791924, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.03142616993791924},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.45660377358490567, "acc_stderr": 0.03065674869673943, "acc_norm": 0.45660377358490567, "acc_norm_stderr": 0.03065674869673943},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.4727272727272727, "acc_stderr": 0.04782001791380063, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.04782001791380063},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903},
        "harness|ko_mmlu_sociology|5": {"acc": 0.572139303482587, "acc_stderr": 0.03498541988407795, "acc_norm": 0.572139303482587, "acc_norm_stderr": 0.03498541988407795},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.3699421965317919, "acc_stderr": 0.03681229633394319, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.03681229633394319},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2724867724867725, "acc_stderr": 0.022930973071633345, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.022930973071633345},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.4421965317919075, "acc_stderr": 0.026738603643807403, "acc_norm": 0.4421965317919075, "acc_norm_stderr": 0.026738603643807403},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.44785276073619634, "acc_stderr": 0.03906947479456601, "acc_norm": 0.44785276073619634, "acc_norm_stderr": 0.03906947479456601},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.4567901234567901, "acc_stderr": 0.027716661650194038, "acc_norm": 0.4567901234567901, "acc_norm_stderr": 0.027716661650194038},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.49740932642487046, "acc_stderr": 0.03608390745384487, "acc_norm": 0.49740932642487046, "acc_norm_stderr": 0.03608390745384487},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.20175438596491227, "acc_stderr": 0.03775205013583639, "acc_norm": 0.20175438596491227, "acc_norm_stderr": 0.03775205013583639},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.4935779816513762, "acc_stderr": 0.021435554820013077, "acc_norm": 0.4935779816513762, "acc_norm_stderr": 0.021435554820013077},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.42810457516339867, "acc_stderr": 0.02833239748366427, "acc_norm": 0.42810457516339867, "acc_norm_stderr": 0.02833239748366427},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284},
        "harness|ko_mmlu_international_law|5": {"acc": 0.5950413223140496, "acc_stderr": 0.04481137755942469, "acc_norm": 0.5950413223140496, "acc_norm_stderr": 0.04481137755942469},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.3881578947368421, "acc_stderr": 0.03965842097512744, "acc_norm": 0.3881578947368421, "acc_norm_stderr": 0.03965842097512744},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3660130718954248, "acc_stderr": 0.019488025745529672, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.019488025745529672},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.2801418439716312, "acc_stderr": 0.02678917235114024, "acc_norm": 0.2801418439716312, "acc_norm_stderr": 0.02678917235114024},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.25892857142857145, "acc_stderr": 0.04157751539865629, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.04157751539865629},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.3101851851851852, "acc_stderr": 0.031546962856566295, "acc_norm": 0.3101851851851852, "acc_norm_stderr": 0.031546962856566295},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2446927374301676, "acc_stderr": 0.014378169884098424, "acc_norm": 0.2446927374301676, "acc_norm_stderr": 0.014378169884098424},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.44, "acc_stderr": 0.049888765156985884, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985884},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.33088235294117646, "acc_stderr": 0.028582709753898428, "acc_norm": 0.33088235294117646, "acc_norm_stderr": 0.028582709753898428},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.46938775510204084, "acc_stderr": 0.031949171367580624, "acc_norm": 0.46938775510204084, "acc_norm_stderr": 0.031949171367580624},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5485232067510548, "acc_stderr": 0.032393600173974704, "acc_norm": 0.5485232067510548, "acc_norm_stderr": 0.032393600173974704},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.31486310299869624, "acc_stderr": 0.011862561755715945, "acc_norm": 0.31486310299869624, "acc_norm_stderr": 0.011862561755715945},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.4411764705882353, "acc_stderr": 0.034849415144292316, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.034849415144292316},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5393939393939394, "acc_stderr": 0.03892207016552012, "acc_norm": 0.5393939393939394, "acc_norm_stderr": 0.03892207016552012},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.3023255813953488, "mc1_stderr": 0.01607750926613303, "mc2": 0.45710797981768625, "mc2_stderr": 0.015464643764155465},
        "harness|ko_commongen_v2|2": {"acc": 0.46871310507674147, "acc_stderr": 0.017156666859785456, "acc_norm": 0.5619834710743802, "acc_norm_stderr": 0.017057753702160294}
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v6-13B",
        "model_sha": "f24326c48f4edb60bc3bdc186b65e0fcb9254c1e",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
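Because every run in this batch shares an identical task list and `versions` block, two snapshots can be compared task by task. A sketch under the same assumed local layout (paths hypothetical, threshold arbitrary):

```python
import json

def load_results(path: str) -> dict:
    # Return the per-task "results" block of one backup file.
    with open(path) as f:
        return json.load(f)["results"]

OLD = "42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json"
NEW = "42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json"

old, new = load_results(OLD), load_results(NEW)
for task in sorted(old.keys() & new.keys()):
    # ko_truthfulqa reports mc1/mc2 rather than acc/acc_norm.
    metric = "acc_norm" if "acc_norm" in old[task] else "mc2"
    delta = new[task][metric] - old[task][metric]
    if abs(delta) >= 0.02:  # arbitrary cutoff for a "noteworthy" shift
        print(f"{task}: {metric} {old[task][metric]:.4f} -> "
              f"{new[task][metric]:.4f} ({delta:+.4f})")
```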
42MARU/GenAI-llama2-ko-en-instruct-v7-13B/result_2023-12-03 17:19:11.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3916382252559727,
            "acc_stderr": 0.014264122124938213,
            "acc_norm": 0.4564846416382253,
            "acc_norm_stderr": 0.014555949760496435
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.42272455686118304,
            "acc_stderr": 0.00492982833760698,
            "acc_norm": 0.5592511451902011,
            "acc_norm_stderr": 0.004954622308739005
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47953216374269003,
            "acc_stderr": 0.0383161053282193,
            "acc_norm": 0.47953216374269003,
            "acc_norm_stderr": 0.0383161053282193
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.46601941747572817,
            "acc_stderr": 0.04939291447273481,
            "acc_norm": 0.46601941747572817,
            "acc_norm_stderr": 0.04939291447273481
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5146871008939975,
            "acc_stderr": 0.017872248024429122,
            "acc_norm": 0.5146871008939975,
            "acc_norm_stderr": 0.017872248024429122
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45185185185185184,
            "acc_stderr": 0.04299268905480863,
            "acc_norm": 0.45185185185185184,
            "acc_norm_stderr": 0.04299268905480863
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4297872340425532,
            "acc_stderr": 0.03236214467715564,
            "acc_norm": 0.4297872340425532,
            "acc_norm_stderr": 0.03236214467715564
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.038284011150790206,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.038284011150790206
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4887459807073955,
            "acc_stderr": 0.028390897396863537,
            "acc_norm": 0.4887459807073955,
            "acc_norm_stderr": 0.028390897396863537
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.515695067264574,
            "acc_stderr": 0.0335412657542081,
            "acc_norm": 0.515695067264574,
            "acc_norm_stderr": 0.0335412657542081
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4351145038167939,
            "acc_stderr": 0.04348208051644858,
            "acc_norm": 0.4351145038167939,
            "acc_norm_stderr": 0.04348208051644858
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5050505050505051,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.5050505050505051,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.35172413793103446,
            "acc_stderr": 0.03979236637497411,
            "acc_norm": 0.35172413793103446,
            "acc_norm_stderr": 0.03979236637497411
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3697478991596639,
            "acc_stderr": 0.031357095996135904,
            "acc_norm": 0.3697478991596639,
            "acc_norm_stderr": 0.031357095996135904
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.38974358974358975,
            "acc_stderr": 0.024726967886647074,
            "acc_norm": 0.38974358974358975,
            "acc_norm_stderr": 0.024726967886647074
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5185185185185185,
            "acc_stderr": 0.04830366024635331,
            "acc_norm": 0.5185185185185185,
            "acc_norm_stderr": 0.04830366024635331
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3842364532019704,
            "acc_stderr": 0.0342239856565755,
            "acc_norm": 0.3842364532019704,
            "acc_norm_stderr": 0.0342239856565755
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.43548387096774194,
            "acc_stderr": 0.028206225591502737,
            "acc_norm": 0.43548387096774194,
            "acc_norm_stderr": 0.028206225591502737
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6410256410256411,
            "acc_stderr": 0.03142616993791924,
            "acc_norm": 0.6410256410256411,
            "acc_norm_stderr": 0.03142616993791924
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.43018867924528303,
            "acc_stderr": 0.030471445867183238,
            "acc_norm": 0.43018867924528303,
            "acc_norm_stderr": 0.030471445867183238
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.44545454545454544,
            "acc_stderr": 0.047605488214603246,
            "acc_norm": 0.44545454545454544,
            "acc_norm_stderr": 0.047605488214603246
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24444444444444444,
            "acc_stderr": 0.026202766534652148,
            "acc_norm": 0.24444444444444444,
            "acc_norm_stderr": 0.026202766534652148
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2582781456953642,
            "acc_stderr": 0.035737053147634576,
            "acc_norm": 0.2582781456953642,
            "acc_norm_stderr": 0.035737053147634576
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.527363184079602,
            "acc_stderr": 0.03530235517334682,
            "acc_norm": 0.527363184079602,
            "acc_norm_stderr": 0.03530235517334682
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3815028901734104,
            "acc_stderr": 0.03703851193099522,
            "acc_norm": 0.3815028901734104,
            "acc_norm_stderr": 0.03703851193099522
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.022860838309232072,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.022860838309232072
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.039420826399272135,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.039420826399272135
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.48265895953757226,
            "acc_stderr": 0.026902900458666647,
            "acc_norm": 0.48265895953757226,
            "acc_norm_stderr": 0.026902900458666647
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44171779141104295,
            "acc_stderr": 0.03901591825836184,
            "acc_norm": 0.44171779141104295,
            "acc_norm_stderr": 0.03901591825836184
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.44753086419753085,
            "acc_stderr": 0.027667138569422708,
            "acc_norm": 0.44753086419753085,
            "acc_norm_stderr": 0.027667138569422708
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.49740932642487046,
            "acc_stderr": 0.03608390745384486,
            "acc_norm": 0.49740932642487046,
            "acc_norm_stderr": 0.03608390745384486
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.041857744240220575,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.041857744240220575
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.48807339449541287,
            "acc_stderr": 0.021431223617362233,
            "acc_norm": 0.48807339449541287,
            "acc_norm_stderr": 0.021431223617362233
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.23809523809523808,
            "acc_stderr": 0.038095238095238106,
            "acc_norm": 0.23809523809523808,
            "acc_norm_stderr": 0.038095238095238106
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4019607843137255,
            "acc_stderr": 0.028074158947600663,
            "acc_norm": 0.4019607843137255,
            "acc_norm_stderr": 0.028074158947600663
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5371900826446281,
            "acc_stderr": 0.04551711196104218,
            "acc_norm": 0.5371900826446281,
            "acc_norm_stderr": 0.04551711196104218
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3355263157894737,
            "acc_stderr": 0.038424985593952694,
            "acc_norm": 0.3355263157894737,
            "acc_norm_stderr": 0.038424985593952694
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.37254901960784315,
            "acc_stderr": 0.019559646809215923,
            "acc_norm": 0.37254901960784315,
            "acc_norm_stderr": 0.019559646809215923
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.30141843971631205,
            "acc_stderr": 0.02737412888263115,
            "acc_norm": 0.30141843971631205,
            "acc_norm_stderr": 0.02737412888263115
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.04157751539865629,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.04157751539865629
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.030546745264953195,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.030546745264953195
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2435754189944134,
            "acc_stderr": 0.014355911964767857,
            "acc_norm": 0.2435754189944134,
            "acc_norm_stderr": 0.014355911964767857
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33088235294117646,
            "acc_stderr": 0.02858270975389843,
            "acc_norm": 0.33088235294117646,
            "acc_norm_stderr": 0.02858270975389843
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.44081632653061226,
            "acc_stderr": 0.03178419114175364,
            "acc_norm": 0.44081632653061226,
            "acc_norm_stderr": 0.03178419114175364
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5569620253164557,
            "acc_stderr": 0.03233532777533484,
            "acc_norm": 0.5569620253164557,
            "acc_norm_stderr": 0.03233532777533484
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.32529335071707954,
            "acc_stderr": 0.011965311536571528,
            "acc_norm": 0.32529335071707954,
            "acc_norm_stderr": 0.011965311536571528
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4362745098039216,
            "acc_stderr": 0.03480693138457039,
            "acc_norm": 0.4362745098039216,
            "acc_norm_stderr": 0.03480693138457039
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5333333333333333,
            "acc_stderr": 0.03895658065271846,
            "acc_norm": 0.5333333333333333,
            "acc_norm_stderr": 0.03895658065271846
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3084455324357405,
            "mc1_stderr": 0.01616803938315687,
            "mc2": 0.4532384559135145,
            "mc2_stderr": 0.015485047009493541
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4793388429752066,
            "acc_stderr": 0.017175671279836446,
            "acc_norm": 0.5301062573789846,
            "acc_norm_stderr": 0.017159163590170223
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v7-13B",
        "model_sha": "40b2f1775ec5f92bfa8191fda6bb5f7c78564b3c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3890784982935154,
            "acc_stderr": 0.014247309976045607,
            "acc_norm": 0.4496587030716723,
            "acc_norm_stderr": 0.01453714444428474
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.42013543118900615,
            "acc_stderr": 0.00492571700809971,
            "acc_norm": 0.5487950607448715,
            "acc_norm_stderr": 0.004965963647210315
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.03811079669833531,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.03811079669833531
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5145631067961165,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.5145631067961165,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4878671775223499,
            "acc_stderr": 0.01787469866749135,
            "acc_norm": 0.4878671775223499,
            "acc_norm_stderr": 0.01787469866749135
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04171654161354544,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04171654161354544
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3276595744680851,
            "acc_stderr": 0.030683020843231015,
            "acc_norm": 0.3276595744680851,
            "acc_norm_stderr": 0.030683020843231015
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3795180722891566,
            "acc_stderr": 0.03777798822748018,
            "acc_norm": 0.3795180722891566,
            "acc_norm_stderr": 0.03777798822748018
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5048231511254019,
            "acc_stderr": 0.02839677044411129,
            "acc_norm": 0.5048231511254019,
            "acc_norm_stderr": 0.02839677044411129
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.45739910313901344,
            "acc_stderr": 0.033435777055830646,
            "acc_norm": 0.45739910313901344,
            "acc_norm_stderr": 0.033435777055830646
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48091603053435117,
            "acc_stderr": 0.04382094705550988,
            "acc_norm": 0.48091603053435117,
            "acc_norm_stderr": 0.04382094705550988
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5050505050505051,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.5050505050505051,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3724137931034483,
            "acc_stderr": 0.0402873153294756,
            "acc_norm": 0.3724137931034483,
            "acc_norm_stderr": 0.0402873153294756
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.04158307533083286,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.04158307533083286
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46638655462184875,
            "acc_stderr": 0.03240501447690071,
            "acc_norm": 0.46638655462184875,
            "acc_norm_stderr": 0.03240501447690071
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4307692307692308,
            "acc_stderr": 0.025106820660539753,
            "acc_norm": 0.4307692307692308,
            "acc_norm_stderr": 0.025106820660539753
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3399014778325123,
            "acc_stderr": 0.033327690684107895,
            "acc_norm": 0.3399014778325123,
            "acc_norm_stderr": 0.033327690684107895
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.44516129032258067,
            "acc_stderr": 0.02827241018621491,
            "acc_norm": 0.44516129032258067,
            "acc_norm_stderr": 0.02827241018621491
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6452991452991453,
            "acc_stderr": 0.03134250486245402,
            "acc_norm": 0.6452991452991453,
            "acc_norm_stderr": 0.03134250486245402
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.39245283018867927,
            "acc_stderr": 0.030052580579557845,
            "acc_norm": 0.39245283018867927,
            "acc_norm_stderr": 0.030052580579557845
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.22592592592592592,
            "acc_stderr": 0.025497532639609553,
            "acc_norm": 0.22592592592592592,
            "acc_norm_stderr": 0.025497532639609553
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5970149253731343,
            "acc_stderr": 0.034683432951111266,
            "acc_norm": 0.5970149253731343,
            "acc_norm_stderr": 0.034683432951111266
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3988439306358382,
            "acc_stderr": 0.037336266553835096,
            "acc_norm": 0.3988439306358382,
            "acc_norm_stderr": 0.037336266553835096
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.31216931216931215,
            "acc_stderr": 0.023865206836972585,
            "acc_norm": 0.31216931216931215,
            "acc_norm_stderr": 0.023865206836972585
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3402777777777778,
            "acc_stderr": 0.03962135573486219,
            "acc_norm": 0.3402777777777778,
            "acc_norm_stderr": 0.03962135573486219
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5,
            "acc_stderr": 0.026919095102908273,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.026919095102908273
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44171779141104295,
            "acc_stderr": 0.03901591825836185,
            "acc_norm": 0.44171779141104295,
            "acc_norm_stderr": 0.03901591825836185
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.027744313443376536,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.027744313443376536
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.49740932642487046,
            "acc_stderr": 0.03608390745384487,
            "acc_norm": 0.49740932642487046,
            "acc_norm_stderr": 0.03608390745384487
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21929824561403508,
            "acc_stderr": 0.03892431106518754,
            "acc_norm": 0.21929824561403508,
            "acc_norm_stderr": 0.03892431106518754
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5009174311926605,
            "acc_stderr": 0.021437287056051215,
            "acc_norm": 0.5009174311926605,
            "acc_norm_stderr": 0.021437287056051215
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.038932596106046734,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.038932596106046734
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.46078431372549017,
            "acc_stderr": 0.028541722692618874,
            "acc_norm": 0.46078431372549017,
            "acc_norm_stderr": 0.028541722692618874
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6528925619834711,
            "acc_stderr": 0.04345724570292534,
            "acc_norm": 0.6528925619834711,
            "acc_norm_stderr": 0.04345724570292534
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.48026315789473684,
            "acc_stderr": 0.040657710025626057,
            "acc_norm": 0.48026315789473684,
            "acc_norm_stderr": 0.040657710025626057
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.35784313725490197,
            "acc_stderr": 0.019393058402355435,
            "acc_norm": 0.35784313725490197,
            "acc_norm_stderr": 0.019393058402355435
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3191489361702128,
            "acc_stderr": 0.027807990141320203,
            "acc_norm": 0.3191489361702128,
            "acc_norm_stderr": 0.027807990141320203
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.039523019677025116,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.039523019677025116
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.27314814814814814,
            "acc_stderr": 0.030388051301678116,
            "acc_norm": 0.27314814814814814,
            "acc_norm_stderr": 0.030388051301678116
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.2757352941176471,
            "acc_stderr": 0.027146271936625166,
            "acc_norm": 0.2757352941176471,
            "acc_norm_stderr": 0.027146271936625166
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4489795918367347,
            "acc_stderr": 0.03184213866687579,
            "acc_norm": 0.4489795918367347,
            "acc_norm_stderr": 0.03184213866687579
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.569620253164557,
            "acc_stderr": 0.03223017195937598,
            "acc_norm": 0.569620253164557,
            "acc_norm_stderr": 0.03223017195937598
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3135593220338983,
            "acc_stderr": 0.011849234291459313,
            "acc_norm": 0.3135593220338983,
            "acc_norm_stderr": 0.011849234291459313
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4950980392156863,
            "acc_stderr": 0.03509143375606785,
            "acc_norm": 0.4950980392156863,
            "acc_norm_stderr": 0.03509143375606785
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5515151515151515,
            "acc_stderr": 0.03883565977956928,
            "acc_norm": 0.5515151515151515,
            "acc_norm_stderr": 0.03883565977956928
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26805385556915545,
            "mc1_stderr": 0.015506204722834562,
            "mc2": 0.44689474709496685,
            "mc2_stderr": 0.015256070107718848
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5478158205430933,
            "acc_stderr": 0.017111567130916796,
            "acc_norm": 0.5962219598583235,
            "acc_norm_stderr": 0.016869031540298632
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/GenAI-llama2-ko-en-platypus-13B-v2",
        "model_sha": "1b4eb6319be99c113d17778ce2737acffe2a0fee",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
42MARU/GenAI-llama2-ko-en-platypus-13B/result_2023-10-29 00:31:00.json
ADDED
@@ -0,0 +1,444 @@
1 |
+
{
|
2 |
+
"results": {
|
3 |
+
"harness|ko_arc_challenge|25": {
|
4 |
+
"acc": 0.3839590443686007,
|
5 |
+
"acc_stderr": 0.01421244498065189,
|
6 |
+
"acc_norm": 0.4522184300341297,
|
7 |
+
"acc_norm_stderr": 0.014544519880633832
|
8 |
+
},
|
9 |
+
"harness|ko_hellaswag|10": {
|
10 |
+
"acc": 0.4166500697072296,
|
11 |
+
"acc_stderr": 0.00491996282220832,
|
12 |
+
"acc_norm": 0.5524795857398924,
|
13 |
+
"acc_norm_stderr": 0.004962220512548352
|
14 |
+
},
|
15 |
+
"harness|ko_mmlu_world_religions|5": {
|
16 |
+
"acc": 0.5614035087719298,
|
17 |
+
"acc_stderr": 0.038057975055904594,
|
18 |
+
"acc_norm": 0.5614035087719298,
|
19 |
+
"acc_norm_stderr": 0.038057975055904594
|
20 |
+
},
|
21 |
+
"harness|ko_mmlu_management|5": {
|
22 |
+
"acc": 0.49514563106796117,
|
23 |
+
"acc_stderr": 0.049505043821289195,
|
24 |
+
"acc_norm": 0.49514563106796117,
|
25 |
+
"acc_norm_stderr": 0.049505043821289195
|
26 |
+
},
|
27 |
+
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
+
"acc": 0.5351213282247765,
|
29 |
+
"acc_stderr": 0.017835798806290642,
|
30 |
+
"acc_norm": 0.5351213282247765,
|
31 |
+
"acc_norm_stderr": 0.017835798806290642
|
32 |
+
},
|
33 |
+
"harness|ko_mmlu_anatomy|5": {
|
34 |
+
"acc": 0.34814814814814815,
|
35 |
+
"acc_stderr": 0.041153246103369526,
|
36 |
+
"acc_norm": 0.34814814814814815,
|
37 |
+
"acc_norm_stderr": 0.041153246103369526
|
38 |
+
},
|
39 |
+
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
+
"acc": 0.24,
|
41 |
+
"acc_stderr": 0.042923469599092816,
|
42 |
+
"acc_norm": 0.24,
|
43 |
+
"acc_norm_stderr": 0.042923469599092816
|
44 |
+
},
|
45 |
+
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
+
"acc": 0.31063829787234043,
|
47 |
+
"acc_stderr": 0.03025123757921317,
|
48 |
+
"acc_norm": 0.31063829787234043,
|
49 |
+
"acc_norm_stderr": 0.03025123757921317
|
50 |
+
},
|
51 |
+
"harness|ko_mmlu_virology|5": {
|
52 |
+
"acc": 0.42168674698795183,
|
53 |
+
"acc_stderr": 0.038444531817709175,
|
54 |
+
"acc_norm": 0.42168674698795183,
|
55 |
+
"acc_norm_stderr": 0.038444531817709175
|
56 |
+
},
|
57 |
+
"harness|ko_mmlu_philosophy|5": {
|
58 |
+
"acc": 0.5048231511254019,
|
59 |
+
"acc_stderr": 0.02839677044411129,
|
60 |
+
"acc_norm": 0.5048231511254019,
|
61 |
+
"acc_norm_stderr": 0.02839677044411129
|
62 |
+
},
|
63 |
+
"harness|ko_mmlu_human_aging|5": {
|
64 |
+
"acc": 0.4484304932735426,
|
65 |
+
"acc_stderr": 0.03337883736255099,
|
66 |
+
"acc_norm": 0.4484304932735426,
|
67 |
+
"acc_norm_stderr": 0.03337883736255099
|
68 |
+
},
|
69 |
+
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
+
"acc": 0.5267175572519084,
|
71 |
+
"acc_stderr": 0.04379024936553894,
|
72 |
+
"acc_norm": 0.5267175572519084,
|
73 |
+
"acc_norm_stderr": 0.04379024936553894
|
74 |
+
},
|
75 |
+
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
+
"acc": 0.43,
|
77 |
+
"acc_stderr": 0.04975698519562429,
|
78 |
+
"acc_norm": 0.43,
|
79 |
+
"acc_norm_stderr": 0.04975698519562429
|
80 |
+
},
|
81 |
+
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
+
"acc": 0.494949494949495,
|
83 |
+
"acc_stderr": 0.035621707606254015,
|
84 |
+
"acc_norm": 0.494949494949495,
|
85 |
+
"acc_norm_stderr": 0.035621707606254015
|
86 |
+
},
|
87 |
+
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
+
"acc": 0.4068965517241379,
|
89 |
+
"acc_stderr": 0.04093793981266236,
|
90 |
+
"acc_norm": 0.4068965517241379,
|
91 |
+
"acc_norm_stderr": 0.04093793981266236
|
92 |
+
},
|
93 |
+
"harness|ko_mmlu_college_physics|5": {
|
94 |
+
"acc": 0.17647058823529413,
|
95 |
+
"acc_stderr": 0.0379328118530781,
|
96 |
+
"acc_norm": 0.17647058823529413,
|
97 |
+
"acc_norm_stderr": 0.0379328118530781
|
98 |
+
},
|
99 |
+
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
+
"acc": 0.4957983193277311,
|
101 |
+
"acc_stderr": 0.03247734334448111,
|
102 |
+
"acc_norm": 0.4957983193277311,
|
103 |
+
"acc_norm_stderr": 0.03247734334448111
|
104 |
+
},
|
105 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
+
"acc": 0.4230769230769231,
|
107 |
+
"acc_stderr": 0.025049197876042328,
|
108 |
+
"acc_norm": 0.4230769230769231,
|
109 |
+
"acc_norm_stderr": 0.025049197876042328
|
110 |
+
},
|
111 |
+
"harness|ko_mmlu_computer_security|5": {
|
112 |
+
"acc": 0.46,
|
113 |
+
"acc_stderr": 0.05009082659620332,
|
114 |
+
"acc_norm": 0.46,
|
115 |
+
"acc_norm_stderr": 0.05009082659620332
|
116 |
+
},
|
117 |
+
"harness|ko_mmlu_global_facts|5": {
|
118 |
+
"acc": 0.28,
|
119 |
+
"acc_stderr": 0.04512608598542128,
|
120 |
+
"acc_norm": 0.28,
|
121 |
+
"acc_norm_stderr": 0.04512608598542128
|
122 |
+
},
|
123 |
+
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
+
"acc": 0.46296296296296297,
|
125 |
+
"acc_stderr": 0.04820403072760628,
|
126 |
+
"acc_norm": 0.46296296296296297,
|
127 |
+
"acc_norm_stderr": 0.04820403072760628
|
128 |
+
},
|
129 |
+
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
+
"acc": 0.35960591133004927,
|
131 |
+
"acc_stderr": 0.03376458246509568,
|
132 |
+
"acc_norm": 0.35960591133004927,
|
133 |
+
"acc_norm_stderr": 0.03376458246509568
|
134 |
+
},
|
135 |
+
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
+
"acc": 0.43548387096774194,
|
137 |
+
"acc_stderr": 0.02820622559150275,
|
138 |
+
"acc_norm": 0.43548387096774194,
|
139 |
+
"acc_norm_stderr": 0.02820622559150275
|
140 |
+
},
|
141 |
+
"harness|ko_mmlu_marketing|5": {
|
142 |
+
"acc": 0.6581196581196581,
|
143 |
+
"acc_stderr": 0.03107502852650775,
|
144 |
+
"acc_norm": 0.6581196581196581,
|
145 |
+
"acc_norm_stderr": 0.03107502852650775
|
146 |
+
},
|
147 |
+
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
+
"acc": 0.4490566037735849,
|
149 |
+
"acc_stderr": 0.030612730713641095,
|
150 |
+
"acc_norm": 0.4490566037735849,
|
151 |
+
"acc_norm_stderr": 0.030612730713641095
|
152 |
+
},
|
153 |
+
"harness|ko_mmlu_public_relations|5": {
|
154 |
+
"acc": 0.41818181818181815,
|
155 |
+
"acc_stderr": 0.04724577405731572,
|
156 |
+
"acc_norm": 0.41818181818181815,
|
157 |
+
"acc_norm_stderr": 0.04724577405731572
|
158 |
+
},
|
159 |
+
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
+
"acc": 0.23333333333333334,
|
161 |
+
"acc_stderr": 0.02578787422095932,
|
162 |
+
"acc_norm": 0.23333333333333334,
|
163 |
+
"acc_norm_stderr": 0.02578787422095932
|
164 |
+
},
|
165 |
+
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
+
"acc": 0.2052980132450331,
|
167 |
+
"acc_stderr": 0.032979866484738336,
|
168 |
+
"acc_norm": 0.2052980132450331,
|
169 |
+
"acc_norm_stderr": 0.032979866484738336
|
170 |
+
},
|
171 |
+
"harness|ko_mmlu_sociology|5": {
|
172 |
+
"acc": 0.5771144278606966,
|
173 |
+
"acc_stderr": 0.034932317774212816,
|
174 |
+
"acc_norm": 0.5771144278606966,
|
175 |
+
"acc_norm_stderr": 0.034932317774212816
|
176 |
+
},
|
177 |
+
"harness|ko_mmlu_college_medicine|5": {
|
178 |
+
"acc": 0.3699421965317919,
|
179 |
+
"acc_stderr": 0.036812296333943194,
|
180 |
+
"acc_norm": 0.3699421965317919,
|
181 |
+
"acc_norm_stderr": 0.036812296333943194
|
182 |
+
},
|
183 |
+
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
+
"acc": 0.2830687830687831,
|
185 |
+
"acc_stderr": 0.023201392938194978,
|
186 |
+
"acc_norm": 0.2830687830687831,
|
187 |
+
"acc_norm_stderr": 0.023201392938194978
|
188 |
+
},
|
189 |
+
"harness|ko_mmlu_college_biology|5": {
|
190 |
+
"acc": 0.4166666666666667,
|
191 |
+
"acc_stderr": 0.04122728707651282,
|
192 |
+
"acc_norm": 0.4166666666666667,
|
193 |
+
"acc_norm_stderr": 0.04122728707651282
|
194 |
+
},
|
195 |
+
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
+
"acc": 0.28,
|
197 |
+
"acc_stderr": 0.04512608598542128,
|
198 |
+
"acc_norm": 0.28,
|
199 |
+
"acc_norm_stderr": 0.04512608598542128
|
200 |
+
},
|
201 |
+
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
+
"acc": 0.66,
|
203 |
+
"acc_stderr": 0.04760952285695237,
|
204 |
+
"acc_norm": 0.66,
|
205 |
+
"acc_norm_stderr": 0.04760952285695237
|
206 |
+
},
|
207 |
+
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
+
"acc": 0.5057803468208093,
|
209 |
+
"acc_stderr": 0.02691729617914911,
|
210 |
+
"acc_norm": 0.5057803468208093,
|
211 |
+
"acc_norm_stderr": 0.02691729617914911
|
212 |
+
},
|
213 |
+
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
+
"acc": 0.44785276073619634,
|
215 |
+
"acc_stderr": 0.03906947479456602,
|
216 |
+
"acc_norm": 0.44785276073619634,
|
217 |
+
"acc_norm_stderr": 0.03906947479456602
|
218 |
+
},
|
219 |
+
"harness|ko_mmlu_prehistory|5": {
|
220 |
+
"acc": 0.4722222222222222,
|
221 |
+
"acc_stderr": 0.027777777777777797,
|
222 |
+
"acc_norm": 0.4722222222222222,
|
223 |
+
"acc_norm_stderr": 0.027777777777777797
|
224 |
+
},
|
225 |
+
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
+
"acc": 0.31,
|
227 |
+
"acc_stderr": 0.04648231987117316,
|
228 |
+
"acc_norm": 0.31,
|
229 |
+
"acc_norm_stderr": 0.04648231987117316
|
230 |
+
},
|
231 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
+
"acc": 0.5647668393782384,
|
233 |
+
"acc_stderr": 0.03578038165008586,
|
234 |
+
"acc_norm": 0.5647668393782384,
|
235 |
+
"acc_norm_stderr": 0.03578038165008586
|
236 |
+
},
|
237 |
+
"harness|ko_mmlu_econometrics|5": {
|
238 |
+
"acc": 0.23684210526315788,
|
239 |
+
"acc_stderr": 0.03999423879281335,
|
240 |
+
"acc_norm": 0.23684210526315788,
|
241 |
+
"acc_norm_stderr": 0.03999423879281335
|
242 |
+
},
|
243 |
+
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
+
"acc": 0.5522935779816514,
|
245 |
+
"acc_stderr": 0.02131975496242546,
|
246 |
+
"acc_norm": 0.5522935779816514,
|
247 |
+
"acc_norm_stderr": 0.02131975496242546
|
248 |
+
},
|
249 |
+
"harness|ko_mmlu_formal_logic|5": {
|
250 |
+
"acc": 0.2777777777777778,
|
251 |
+
"acc_stderr": 0.040061680838488774,
|
252 |
+
"acc_norm": 0.2777777777777778,
|
253 |
+
"acc_norm_stderr": 0.040061680838488774
|
254 |
+
},
|
255 |
+
"harness|ko_mmlu_nutrition|5": {
|
256 |
+
"acc": 0.45751633986928103,
|
257 |
+
"acc_stderr": 0.02852638345214264,
|
258 |
+
"acc_norm": 0.45751633986928103,
|
259 |
+
"acc_norm_stderr": 0.02852638345214264
|
260 |
+
},
|
261 |
+
"harness|ko_mmlu_business_ethics|5": {
|
262 |
+
"acc": 0.48,
|
263 |
+
"acc_stderr": 0.050211673156867795,
|
264 |
+
"acc_norm": 0.48,
|
265 |
+
"acc_norm_stderr": 0.050211673156867795
|
266 |
+
},
|
267 |
+
"harness|ko_mmlu_international_law|5": {
|
268 |
+
"acc": 0.6859504132231405,
|
269 |
+
"acc_stderr": 0.04236964753041018,
|
270 |
+
"acc_norm": 0.6859504132231405,
|
271 |
+
"acc_norm_stderr": 0.04236964753041018
|
272 |
+
},
|
273 |
+
"harness|ko_mmlu_astronomy|5": {
|
274 |
+
"acc": 0.45394736842105265,
|
275 |
+
"acc_stderr": 0.04051646342874141,
|
276 |
+
"acc_norm": 0.45394736842105265,
|
277 |
+
"acc_norm_stderr": 0.04051646342874141
|
278 |
+
},
|
279 |
+
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
+
"acc": 0.3839869281045752,
|
281 |
+
"acc_stderr": 0.019675808135281525,
|
282 |
+
"acc_norm": 0.3839869281045752,
|
283 |
+
"acc_norm_stderr": 0.019675808135281525
|
284 |
+
},
|
285 |
+
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
+
"acc": 0.35815602836879434,
|
287 |
+
"acc_stderr": 0.02860208586275942,
|
288 |
+
"acc_norm": 0.35815602836879434,
|
289 |
+
"acc_norm_stderr": 0.02860208586275942
|
290 |
+
},
|
291 |
+
"harness|ko_mmlu_machine_learning|5": {
|
292 |
+
"acc": 0.2857142857142857,
|
293 |
+
"acc_stderr": 0.042878587513404544,
|
294 |
+
"acc_norm": 0.2857142857142857,
|
295 |
+
"acc_norm_stderr": 0.042878587513404544
|
296 |
+
},
|
297 |
+
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
+
"acc": 0.25462962962962965,
|
299 |
+
"acc_stderr": 0.02971127586000534,
|
300 |
+
"acc_norm": 0.25462962962962965,
|
301 |
+
"acc_norm_stderr": 0.02971127586000534
|
302 |
+
},
|
303 |
+
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
+
"acc": 0.293854748603352,
|
305 |
+
"acc_stderr": 0.015235075776719616,
|
306 |
+
"acc_norm": 0.293854748603352,
|
307 |
+
"acc_norm_stderr": 0.015235075776719616
|
308 |
+
},
|
309 |
+
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
+
"acc": 0.32,
|
311 |
+
"acc_stderr": 0.046882617226215034,
|
312 |
+
"acc_norm": 0.32,
|
313 |
+
"acc_norm_stderr": 0.046882617226215034
|
314 |
+
},
|
315 |
+
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
+
"acc": 0.41,
|
317 |
+
"acc_stderr": 0.049431107042371025,
|
318 |
+
"acc_norm": 0.41,
|
319 |
+
"acc_norm_stderr": 0.049431107042371025
|
320 |
+
},
|
321 |
+
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
+
"acc": 0.27941176470588236,
|
323 |
+
"acc_stderr": 0.02725720260611495,
|
324 |
+
"acc_norm": 0.27941176470588236,
|
325 |
+
"acc_norm_stderr": 0.02725720260611495
|
326 |
+
},
|
327 |
+
"harness|ko_mmlu_security_studies|5": {
|
328 |
+
"acc": 0.4775510204081633,
|
329 |
+
"acc_stderr": 0.031976941187136725,
|
330 |
+
"acc_norm": 0.4775510204081633,
|
331 |
+
"acc_norm_stderr": 0.031976941187136725
|
332 |
+
},
|
333 |
+
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
+
"acc": 0.6075949367088608,
|
335 |
+
"acc_stderr": 0.0317847187456473,
|
336 |
+
"acc_norm": 0.6075949367088608,
|
337 |
+
"acc_norm_stderr": 0.0317847187456473
|
338 |
+
},
|
339 |
+
"harness|ko_mmlu_professional_law|5": {
|
340 |
+
"acc": 0.33116036505867014,
|
341 |
+
"acc_stderr": 0.01202012819598576,
|
342 |
+
"acc_norm": 0.33116036505867014,
|
343 |
+
"acc_norm_stderr": 0.01202012819598576
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.49019607843137253,
|
347 |
+
"acc_stderr": 0.03508637358630572,
|
348 |
+
"acc_norm": 0.49019607843137253,
|
349 |
+
"acc_norm_stderr": 0.03508637358630572
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.5454545454545454,
|
353 |
+
"acc_stderr": 0.038881769216741004,
|
354 |
+
"acc_norm": 0.5454545454545454,
|
355 |
+
"acc_norm_stderr": 0.038881769216741004
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.27539779681762544,
|
359 |
+
"mc1_stderr": 0.015638135667775523,
|
360 |
+
"mc2": 0.4478444454695957,
|
361 |
+
"mc2_stderr": 0.015296142940086415
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.5454545454545454,
|
365 |
+
"acc_stderr": 0.017119172208061504,
|
366 |
+
"acc_norm": 0.5938606847697757,
|
367 |
+
"acc_norm_stderr": 0.016884749503191396
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "42MARU/GenAI-llama2-ko-en-platypus-13B",
|
436 |
+
"model_sha": "61d276d0715184790bae2979744f1ae7c0f451c0",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
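Between the per-file diffs, it may help to see how these backup result files are meant to be consumed. Below is a minimal sketch, assuming only the schema visible above ("results" keyed by harness task name, "config_general" carrying "model_name"); the path is one of the files added in this commit, and the macro-average over ko_mmlu subtasks is an illustrative aggregate, not the leaderboard's official scoring.

```python
import json
from statistics import mean

# Load one of the backup result files added in this commit and
# macro-average acc_norm over the ko_mmlu subtasks as a sanity check.
with open("42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json") as f:
    data = json.load(f)

mmlu_scores = [
    metrics["acc_norm"]
    for task, metrics in data["results"].items()
    if task.startswith("harness|ko_mmlu_")
]
print(data["config_general"]["model_name"])
print(f"ko_mmlu macro acc_norm over {len(mmlu_scores)} subtasks: {mean(mmlu_scores):.4f}")
```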
42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3455631399317406,
            "acc_stderr": 0.013896938461145678,
            "acc_norm": 0.3839590443686007,
            "acc_norm_stderr": 0.01421244498065189
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3975303724357698,
            "acc_stderr": 0.004883871774350598,
            "acc_norm": 0.5247958573989245,
            "acc_norm_stderr": 0.004983641854351152
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.3684210526315789,
            "acc_stderr": 0.036996580176568775,
            "acc_norm": 0.3684210526315789,
            "acc_norm_stderr": 0.036996580176568775
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.32038834951456313,
            "acc_stderr": 0.0462028408228004,
            "acc_norm": 0.32038834951456313,
            "acc_norm_stderr": 0.0462028408228004
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3831417624521073,
            "acc_stderr": 0.01738477419488563,
            "acc_norm": 0.3831417624521073,
            "acc_norm_stderr": 0.01738477419488563
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.32592592592592595,
            "acc_stderr": 0.040491220417025055,
            "acc_norm": 0.32592592592592595,
            "acc_norm_stderr": 0.040491220417025055
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.28085106382978725,
            "acc_stderr": 0.02937917046412482,
            "acc_norm": 0.28085106382978725,
            "acc_norm_stderr": 0.02937917046412482
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3795180722891566,
            "acc_stderr": 0.03777798822748017,
            "acc_norm": 0.3795180722891566,
            "acc_norm_stderr": 0.03777798822748017
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.35691318327974275,
            "acc_stderr": 0.027210420375934012,
            "acc_norm": 0.35691318327974275,
            "acc_norm_stderr": 0.027210420375934012
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4170403587443946,
            "acc_stderr": 0.03309266936071721,
            "acc_norm": 0.4170403587443946,
            "acc_norm_stderr": 0.03309266936071721
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4198473282442748,
            "acc_stderr": 0.043285772152629715,
            "acc_norm": 0.4198473282442748,
            "acc_norm_stderr": 0.043285772152629715
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.35858585858585856,
            "acc_stderr": 0.034169036403915214,
            "acc_norm": 0.35858585858585856,
            "acc_norm_stderr": 0.034169036403915214
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.25517241379310346,
            "acc_stderr": 0.03632984052707842,
            "acc_norm": 0.25517241379310346,
            "acc_norm_stderr": 0.03632984052707842
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.1568627450980392,
            "acc_stderr": 0.03618664819936245,
            "acc_norm": 0.1568627450980392,
            "acc_norm_stderr": 0.03618664819936245
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.25630252100840334,
            "acc_stderr": 0.028359620870533953,
            "acc_norm": 0.25630252100840334,
            "acc_norm_stderr": 0.028359620870533953
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24871794871794872,
            "acc_stderr": 0.021916957709213803,
            "acc_norm": 0.24871794871794872,
            "acc_norm_stderr": 0.021916957709213803
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04668408033024932,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04668408033024932
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.23645320197044334,
            "acc_stderr": 0.029896114291733545,
            "acc_norm": 0.23645320197044334,
            "acc_norm_stderr": 0.029896114291733545
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3161290322580645,
            "acc_stderr": 0.026450874489042767,
            "acc_norm": 0.3161290322580645,
            "acc_norm_stderr": 0.026450874489042767
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.4358974358974359,
            "acc_stderr": 0.03248577511578401,
            "acc_norm": 0.4358974358974359,
            "acc_norm_stderr": 0.03248577511578401
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.30943396226415093,
            "acc_stderr": 0.028450154794118627,
            "acc_norm": 0.30943396226415093,
            "acc_norm_stderr": 0.028450154794118627
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.37272727272727274,
            "acc_stderr": 0.04631381319425463,
            "acc_norm": 0.37272727272727274,
            "acc_norm_stderr": 0.04631381319425463
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.026593939101844072,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.026593939101844072
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.23841059602649006,
            "acc_stderr": 0.0347918557259966,
            "acc_norm": 0.23841059602649006,
            "acc_norm_stderr": 0.0347918557259966
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.3283582089552239,
            "acc_stderr": 0.033206858897443244,
            "acc_norm": 0.3283582089552239,
            "acc_norm_stderr": 0.033206858897443244
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.23121387283236994,
            "acc_stderr": 0.0321473730202947,
            "acc_norm": 0.23121387283236994,
            "acc_norm_stderr": 0.0321473730202947
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.23809523809523808,
            "acc_stderr": 0.021935878081184756,
            "acc_norm": 0.23809523809523808,
            "acc_norm_stderr": 0.021935878081184756
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.03745554791462457,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.03745554791462457
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.33236994219653176,
            "acc_stderr": 0.025361168749688225,
            "acc_norm": 0.33236994219653176,
            "acc_norm_stderr": 0.025361168749688225
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.25766871165644173,
            "acc_stderr": 0.03436150827846917,
            "acc_norm": 0.25766871165644173,
            "acc_norm_stderr": 0.03436150827846917
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.33641975308641975,
            "acc_stderr": 0.026289734945952926,
            "acc_norm": 0.33641975308641975,
            "acc_norm_stderr": 0.026289734945952926
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.32642487046632124,
            "acc_stderr": 0.033840286211432945,
            "acc_norm": 0.32642487046632124,
            "acc_norm_stderr": 0.033840286211432945
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.30701754385964913,
            "acc_stderr": 0.0433913832257986,
            "acc_norm": 0.30701754385964913,
            "acc_norm_stderr": 0.0433913832257986
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3174311926605505,
            "acc_stderr": 0.019957152198460497,
            "acc_norm": 0.3174311926605505,
            "acc_norm_stderr": 0.019957152198460497
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1746031746031746,
            "acc_stderr": 0.03395490020856111,
            "acc_norm": 0.1746031746031746,
            "acc_norm_stderr": 0.03395490020856111
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.35294117647058826,
            "acc_stderr": 0.02736359328468495,
            "acc_norm": 0.35294117647058826,
            "acc_norm_stderr": 0.02736359328468495
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.36363636363636365,
            "acc_stderr": 0.043913262867240704,
            "acc_norm": 0.36363636363636365,
            "acc_norm_stderr": 0.043913262867240704
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3026315789473684,
            "acc_stderr": 0.037385206761196686,
            "acc_norm": 0.3026315789473684,
            "acc_norm_stderr": 0.037385206761196686
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2679738562091503,
            "acc_stderr": 0.017917974069594726,
            "acc_norm": 0.2679738562091503,
            "acc_norm_stderr": 0.017917974069594726
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2624113475177305,
            "acc_stderr": 0.02624492034984301,
            "acc_norm": 0.2624113475177305,
            "acc_norm_stderr": 0.02624492034984301
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.33035714285714285,
            "acc_stderr": 0.04464285714285712,
            "acc_norm": 0.33035714285714285,
            "acc_norm_stderr": 0.04464285714285712
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.029157522184605586,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.029157522184605586
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.35294117647058826,
            "acc_stderr": 0.0290294228156814,
            "acc_norm": 0.35294117647058826,
            "acc_norm_stderr": 0.0290294228156814
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.27755102040816326,
            "acc_stderr": 0.028666857790274648,
            "acc_norm": 0.27755102040816326,
            "acc_norm_stderr": 0.028666857790274648
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.35864978902953587,
            "acc_stderr": 0.031219569445301847,
            "acc_norm": 0.35864978902953587,
            "acc_norm_stderr": 0.031219569445301847
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.27249022164276404,
            "acc_stderr": 0.01137165829431153,
            "acc_norm": 0.27249022164276404,
            "acc_norm_stderr": 0.01137165829431153
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2696078431372549,
            "acc_stderr": 0.031145570659486782,
            "acc_norm": 0.2696078431372549,
            "acc_norm_stderr": 0.031145570659486782
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3090909090909091,
            "acc_stderr": 0.036085410115739666,
            "acc_norm": 0.3090909090909091,
            "acc_norm_stderr": 0.036085410115739666
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24969400244798043,
            "mc1_stderr": 0.015152286907148125,
            "mc2": 0.39805148377575406,
            "mc2_stderr": 0.015027401787198838
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2396694214876033,
            "acc_stderr": 0.014676495332267253,
            "acc_norm": 0.31286894923258557,
            "acc_norm_stderr": 0.015941010118302654
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/llama-2-ko-7b-instruct",
        "model_sha": "3c590472282b5de4c76d846153db5f41b82c1b62",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3293515358361775,
            "acc_stderr": 0.013734057652635474,
            "acc_norm": 0.386518771331058,
            "acc_norm_stderr": 0.014230084761910474
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3835889265086636,
            "acc_stderr": 0.00485265887677539,
            "acc_norm": 0.5022903804023103,
            "acc_norm_stderr": 0.004989729059957435
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.391812865497076,
            "acc_stderr": 0.03743979825926401,
            "acc_norm": 0.391812865497076,
            "acc_norm_stderr": 0.03743979825926401
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2815533980582524,
            "acc_stderr": 0.04453254836326466,
            "acc_norm": 0.2815533980582524,
            "acc_norm_stderr": 0.04453254836326466
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.367816091954023,
            "acc_stderr": 0.01724382889184626,
            "acc_norm": 0.367816091954023,
            "acc_norm_stderr": 0.01724382889184626
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.04094376269996795,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.04094376269996795
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2851063829787234,
            "acc_stderr": 0.02951319662553935,
            "acc_norm": 0.2851063829787234,
            "acc_norm_stderr": 0.02951319662553935
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3132530120481928,
            "acc_stderr": 0.03610805018031024,
            "acc_norm": 0.3132530120481928,
            "acc_norm_stderr": 0.03610805018031024
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3858520900321543,
            "acc_stderr": 0.027648149599751464,
            "acc_norm": 0.3858520900321543,
            "acc_norm_stderr": 0.027648149599751464
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.43946188340807174,
            "acc_stderr": 0.03331092511038179,
            "acc_norm": 0.43946188340807174,
            "acc_norm_stderr": 0.03331092511038179
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4351145038167939,
            "acc_stderr": 0.04348208051644858,
            "acc_norm": 0.4351145038167939,
            "acc_norm_stderr": 0.04348208051644858
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.31313131313131315,
            "acc_stderr": 0.03304205087813653,
            "acc_norm": 0.31313131313131315,
            "acc_norm_stderr": 0.03304205087813653
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3586206896551724,
            "acc_stderr": 0.039966295748767186,
            "acc_norm": 0.3586206896551724,
            "acc_norm_stderr": 0.039966295748767186
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617747,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617747
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3697478991596639,
            "acc_stderr": 0.031357095996135904,
            "acc_norm": 0.3697478991596639,
            "acc_norm_stderr": 0.031357095996135904
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2794871794871795,
            "acc_stderr": 0.022752388839776823,
            "acc_norm": 0.2794871794871795,
            "acc_norm_stderr": 0.022752388839776823
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036846,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036846
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.04691521224077742,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.04691521224077742
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.22660098522167488,
            "acc_stderr": 0.02945486383529298,
            "acc_norm": 0.22660098522167488,
            "acc_norm_stderr": 0.02945486383529298
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3225806451612903,
            "acc_stderr": 0.026593084516572267,
            "acc_norm": 0.3225806451612903,
            "acc_norm_stderr": 0.026593084516572267
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5,
            "acc_stderr": 0.03275608910402091,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03275608910402091
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3471698113207547,
            "acc_stderr": 0.029300101705549652,
            "acc_norm": 0.3471698113207547,
            "acc_norm_stderr": 0.029300101705549652
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.36363636363636365,
            "acc_stderr": 0.04607582090719976,
            "acc_norm": 0.36363636363636365,
            "acc_norm_stderr": 0.04607582090719976
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.23703703703703705,
            "acc_stderr": 0.025928876132766118,
            "acc_norm": 0.23703703703703705,
            "acc_norm_stderr": 0.025928876132766118
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2119205298013245,
            "acc_stderr": 0.03336767086567977,
            "acc_norm": 0.2119205298013245,
            "acc_norm_stderr": 0.03336767086567977
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.39800995024875624,
            "acc_stderr": 0.034611994290400135,
            "acc_norm": 0.39800995024875624,
            "acc_norm_stderr": 0.034611994290400135
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2774566473988439,
            "acc_stderr": 0.03414014007044036,
            "acc_norm": 0.2774566473988439,
            "acc_norm_stderr": 0.03414014007044036
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24867724867724866,
            "acc_stderr": 0.022261817692400175,
            "acc_norm": 0.24867724867724866,
            "acc_norm_stderr": 0.022261817692400175
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2916666666666667,
            "acc_stderr": 0.03800968060554859,
            "acc_norm": 0.2916666666666667,
            "acc_norm_stderr": 0.03800968060554859
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.38439306358381503,
            "acc_stderr": 0.026189666966272035,
            "acc_norm": 0.38439306358381503,
            "acc_norm_stderr": 0.026189666966272035
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3496932515337423,
            "acc_stderr": 0.03746668325470022,
            "acc_norm": 0.3496932515337423,
            "acc_norm_stderr": 0.03746668325470022
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.026725868809100793,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.026725868809100793
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.35751295336787564,
            "acc_stderr": 0.03458816042181005,
            "acc_norm": 0.35751295336787564,
            "acc_norm_stderr": 0.03458816042181005
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3394495412844037,
            "acc_stderr": 0.02030210934266235,
            "acc_norm": 0.3394495412844037,
            "acc_norm_stderr": 0.02030210934266235
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.039325376803928704,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.039325376803928704
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3660130718954248,
            "acc_stderr": 0.027582811415159607,
            "acc_norm": 0.3660130718954248,
            "acc_norm_stderr": 0.027582811415159607
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.49586776859504134,
            "acc_stderr": 0.04564198767432754,
            "acc_norm": 0.49586776859504134,
            "acc_norm_stderr": 0.04564198767432754
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.24342105263157895,
            "acc_stderr": 0.034923496688842384,
            "acc_norm": 0.24342105263157895,
            "acc_norm_stderr": 0.034923496688842384
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3366013071895425,
            "acc_stderr": 0.019117213911495165,
            "acc_norm": 0.3366013071895425,
            "acc_norm_stderr": 0.019117213911495165
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.29432624113475175,
            "acc_stderr": 0.027187127011503796,
            "acc_norm": 0.29432624113475175,
            "acc_norm_stderr": 0.027187127011503796
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.04327040932578728,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.04327040932578728
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2824074074074074,
            "acc_stderr": 0.030701372111510927,
            "acc_norm": 0.2824074074074074,
            "acc_norm_stderr": 0.030701372111510927
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.25139664804469275,
            "acc_stderr": 0.014508979453553977,
            "acc_norm": 0.25139664804469275,
            "acc_norm_stderr": 0.014508979453553977
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3786764705882353,
            "acc_stderr": 0.029465133639776125,
            "acc_norm": 0.3786764705882353,
            "acc_norm_stderr": 0.029465133639776125
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3510204081632653,
            "acc_stderr": 0.03055531675557364,
            "acc_norm": 0.3510204081632653,
            "acc_norm_stderr": 0.03055531675557364
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4767932489451477,
            "acc_stderr": 0.032512152011410174,
            "acc_norm": 0.4767932489451477,
            "acc_norm_stderr": 0.032512152011410174
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3089960886571056,
            "acc_stderr": 0.01180172977723925,
            "acc_norm": 0.3089960886571056,
            "acc_norm_stderr": 0.01180172977723925
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.31862745098039214,
            "acc_stderr": 0.032702871814820816,
            "acc_norm": 0.31862745098039214,
            "acc_norm_stderr": 0.032702871814820816
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3212121212121212,
            "acc_stderr": 0.0364620496325381,
            "acc_norm": 0.3212121212121212,
            "acc_norm_stderr": 0.0364620496325381
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.25091799265605874,
            "mc1_stderr": 0.01517698502770769,
            "mc2": 0.38056097212603235,
            "mc2_stderr": 0.014936929596682727
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.21605667060212513,
            "acc_stderr": 0.014149496716043137,
            "acc_norm": 0.29279811097992914,
            "acc_norm_stderr": 0.015644823205401337
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/llama-2-ko-7b-instruction-v3",
        "model_sha": "c0fea9cb31d4ae90aa2ed048f774a9000341b538",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
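Each metric in these files is paired with a standard error, which supports a quick normal-approximation confidence interval. A rough sketch under that assumption, using the ko_commongen_v2 numbers from the file above; the 1.96 multiplier is the usual 95% z-value, not something the harness itself emits.

```python
# Normal-approximation 95% interval from a (value, stderr) pair.
def ci95(value: float, stderr: float) -> tuple[float, float]:
    half_width = 1.96 * stderr  # z-value for a two-sided 95% interval
    return value - half_width, value + half_width

# acc_norm and acc_norm_stderr from the ko_commongen_v2 block above.
low, high = ci95(0.29279811097992914, 0.015644823205401337)
print(f"acc_norm 95% CI: [{low:.4f}, {high:.4f}]")
```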
42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json
ADDED
@@ -0,0 +1,444 @@
1 |
+
{
|
2 |
+
"results": {
|
3 |
+
"harness|ko_arc_challenge|25": {
|
4 |
+
"acc": 0.3046075085324232,
|
5 |
+
"acc_stderr": 0.013449522109932492,
|
6 |
+
"acc_norm": 0.363481228668942,
|
7 |
+
"acc_norm_stderr": 0.014056207319068282
|
8 |
+
},
|
9 |
+
"harness|ko_hellaswag|10": {
|
10 |
+
"acc": 0.3981278629755029,
|
11 |
+
"acc_stderr": 0.0048851164655502755,
|
12 |
+
"acc_norm": 0.5159330810595499,
|
13 |
+
"acc_norm_stderr": 0.004987247325495624
|
14 |
+
},
|
15 |
+
"harness|ko_mmlu_world_religions|5": {
|
16 |
+
"acc": 0.22807017543859648,
|
17 |
+
"acc_stderr": 0.03218093795602357,
|
18 |
+
"acc_norm": 0.22807017543859648,
|
19 |
+
"acc_norm_stderr": 0.03218093795602357
|
20 |
+
},
|
21 |
+
"harness|ko_mmlu_management|5": {
|
22 |
+
"acc": 0.24271844660194175,
|
23 |
+
"acc_stderr": 0.04245022486384495,
|
24 |
+
"acc_norm": 0.24271844660194175,
|
25 |
+
"acc_norm_stderr": 0.04245022486384495
|
26 |
+
},
|
27 |
+
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
+
"acc": 0.23754789272030652,
|
29 |
+
"acc_stderr": 0.015218733046150193,
|
30 |
+
"acc_norm": 0.23754789272030652,
|
31 |
+
"acc_norm_stderr": 0.015218733046150193
|
32 |
+
},
|
33 |
+
"harness|ko_mmlu_anatomy|5": {
|
34 |
+
"acc": 0.28888888888888886,
|
35 |
+
"acc_stderr": 0.039154506304142495,
|
36 |
+
"acc_norm": 0.28888888888888886,
|
37 |
+
"acc_norm_stderr": 0.039154506304142495
|
38 |
+
},
|
39 |
+
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
+
"acc": 0.27,
|
41 |
+
"acc_stderr": 0.0446196043338474,
|
42 |
+
"acc_norm": 0.27,
|
43 |
+
"acc_norm_stderr": 0.0446196043338474
|
44 |
+
},
|
45 |
+
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
+
"acc": 0.2297872340425532,
|
47 |
+
"acc_stderr": 0.02750175294441242,
|
48 |
+
"acc_norm": 0.2297872340425532,
|
49 |
+
"acc_norm_stderr": 0.02750175294441242
|
50 |
+
},
|
51 |
+
"harness|ko_mmlu_virology|5": {
|
52 |
+
"acc": 0.2469879518072289,
|
53 |
+
"acc_stderr": 0.03357351982064536,
|
54 |
+
"acc_norm": 0.2469879518072289,
|
55 |
+
"acc_norm_stderr": 0.03357351982064536
|
56 |
+
},
|
57 |
+
"harness|ko_mmlu_philosophy|5": {
|
58 |
+
"acc": 0.33440514469453375,
|
59 |
+
"acc_stderr": 0.026795422327893944,
|
60 |
+
"acc_norm": 0.33440514469453375,
|
61 |
+
"acc_norm_stderr": 0.026795422327893944
|
62 |
+
},
|
63 |
+
"harness|ko_mmlu_human_aging|5": {
|
64 |
+
"acc": 0.1031390134529148,
|
65 |
+
"acc_stderr": 0.020412564289839272,
|
66 |
+
"acc_norm": 0.1031390134529148,
|
67 |
+
"acc_norm_stderr": 0.020412564289839272
|
68 |
+
},
|
69 |
+
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
+
"acc": 0.2748091603053435,
|
71 |
+
"acc_stderr": 0.039153454088478354,
|
72 |
+
"acc_norm": 0.2748091603053435,
|
73 |
+
"acc_norm_stderr": 0.039153454088478354
|
74 |
+
},
|
75 |
+
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
+
"acc": 0.37,
|
77 |
+
"acc_stderr": 0.048523658709391,
|
78 |
+
"acc_norm": 0.37,
|
79 |
+
"acc_norm_stderr": 0.048523658709391
|
80 |
+
},
|
81 |
+
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
+
"acc": 0.40404040404040403,
|
83 |
+
"acc_stderr": 0.03496130972056128,
|
84 |
+
"acc_norm": 0.40404040404040403,
|
85 |
+
"acc_norm_stderr": 0.03496130972056128
|
86 |
+
},
|
87 |
+
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
+
"acc": 0.3310344827586207,
|
89 |
+
"acc_stderr": 0.03921545312467122,
|
90 |
+
"acc_norm": 0.3310344827586207,
|
91 |
+
"acc_norm_stderr": 0.03921545312467122
|
92 |
+
},
|
93 |
+
"harness|ko_mmlu_college_physics|5": {
|
94 |
+
"acc": 0.2647058823529412,
|
95 |
+
"acc_stderr": 0.04389869956808777,
|
96 |
+
"acc_norm": 0.2647058823529412,
|
97 |
+
"acc_norm_stderr": 0.04389869956808777
|
98 |
+
},
|
99 |
+
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
+
"acc": 0.18907563025210083,
|
101 |
+
"acc_stderr": 0.02543511943810536,
|
102 |
+
"acc_norm": 0.18907563025210083,
|
103 |
+
"acc_norm_stderr": 0.02543511943810536
|
104 |
+
},
|
105 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
+
"acc": 0.21794871794871795,
|
107 |
+
"acc_stderr": 0.020932445774463175,
|
108 |
+
"acc_norm": 0.21794871794871795,
|
109 |
+
"acc_norm_stderr": 0.020932445774463175
|
110 |
+
},
|
111 |
+
"harness|ko_mmlu_computer_security|5": {
|
112 |
+
"acc": 0.18,
|
113 |
+
"acc_stderr": 0.03861229196653694,
|
114 |
+
"acc_norm": 0.18,
|
115 |
+
"acc_norm_stderr": 0.03861229196653694
|
116 |
+
},
|
117 |
+
"harness|ko_mmlu_global_facts|5": {
|
118 |
+
"acc": 0.16,
|
119 |
+
"acc_stderr": 0.03684529491774708,
|
120 |
+
"acc_norm": 0.16,
|
121 |
+
"acc_norm_stderr": 0.03684529491774708
|
122 |
+
},
|
123 |
+
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
+
"acc": 0.26851851851851855,
|
125 |
+
"acc_stderr": 0.04284467968052191,
|
126 |
+
"acc_norm": 0.26851851851851855,
|
127 |
+
"acc_norm_stderr": 0.04284467968052191
|
128 |
+
},
|
129 |
+
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
+
"acc": 0.1921182266009852,
|
131 |
+
"acc_stderr": 0.027719315709614778,
|
132 |
+
"acc_norm": 0.1921182266009852,
|
133 |
+
"acc_norm_stderr": 0.027719315709614778
|
134 |
+
},
|
135 |
+
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
+
"acc": 0.25483870967741934,
|
137 |
+
"acc_stderr": 0.024790118459332204,
|
138 |
+
"acc_norm": 0.25483870967741934,
|
139 |
+
"acc_norm_stderr": 0.024790118459332204
|
140 |
+
},
|
141 |
+
"harness|ko_mmlu_marketing|5": {
|
142 |
+
"acc": 0.2905982905982906,
|
143 |
+
"acc_stderr": 0.029745048572674057,
|
144 |
+
"acc_norm": 0.2905982905982906,
|
145 |
+
"acc_norm_stderr": 0.029745048572674057
|
146 |
+
},
|
147 |
+
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
+
"acc": 0.29056603773584905,
|
149 |
+
"acc_stderr": 0.027943219989337156,
|
150 |
+
"acc_norm": 0.29056603773584905,
|
151 |
+
"acc_norm_stderr": 0.027943219989337156
|
152 |
+
},
|
153 |
+
"harness|ko_mmlu_public_relations|5": {
|
154 |
+
"acc": 0.2636363636363636,
|
155 |
+
"acc_stderr": 0.04220224692971987,
|
156 |
+
"acc_norm": 0.2636363636363636,
|
157 |
+
"acc_norm_stderr": 0.04220224692971987
|
158 |
+
},
|
159 |
+
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
+
"acc": 0.25925925925925924,
|
161 |
+
"acc_stderr": 0.026719240783712166,
|
162 |
+
"acc_norm": 0.25925925925925924,
|
163 |
+
"acc_norm_stderr": 0.026719240783712166
|
164 |
+
},
|
165 |
+
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
+
"acc": 0.2913907284768212,
|
167 |
+
"acc_stderr": 0.03710185726119995,
|
168 |
+
"acc_norm": 0.2913907284768212,
|
169 |
+
"acc_norm_stderr": 0.03710185726119995
|
170 |
+
},
|
171 |
+
"harness|ko_mmlu_sociology|5": {
|
172 |
+
"acc": 0.19402985074626866,
|
173 |
+
"acc_stderr": 0.027962677604768914,
|
174 |
+
"acc_norm": 0.19402985074626866,
|
175 |
+
"acc_norm_stderr": 0.027962677604768914
|
176 |
+
},
|
177 |
+
"harness|ko_mmlu_college_medicine|5": {
|
178 |
+
"acc": 0.2254335260115607,
|
179 |
+
"acc_stderr": 0.03186209851641143,
|
180 |
+
"acc_norm": 0.2254335260115607,
|
181 |
+
"acc_norm_stderr": 0.03186209851641143
|
182 |
+
},
|
183 |
+
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
+
"acc": 0.24867724867724866,
|
185 |
+
"acc_stderr": 0.02226181769240018,
|
186 |
+
"acc_norm": 0.24867724867724866,
|
187 |
+
"acc_norm_stderr": 0.02226181769240018
|
188 |
+
},
|
189 |
+
"harness|ko_mmlu_college_biology|5": {
|
190 |
+
"acc": 0.3125,
|
191 |
+
"acc_stderr": 0.038760854559127644,
|
192 |
+
"acc_norm": 0.3125,
|
193 |
+
"acc_norm_stderr": 0.038760854559127644
|
194 |
+
},
|
195 |
+
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
+
"acc": 0.21,
|
197 |
+
"acc_stderr": 0.04093601807403326,
|
198 |
+
"acc_norm": 0.21,
|
199 |
+
"acc_norm_stderr": 0.04093601807403326
|
200 |
+
},
|
201 |
+
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
+
"acc": 0.27,
|
203 |
+
"acc_stderr": 0.044619604333847394,
|
204 |
+
"acc_norm": 0.27,
|
205 |
+
"acc_norm_stderr": 0.044619604333847394
|
206 |
+
},
|
207 |
+
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
+
"acc": 0.2774566473988439,
|
209 |
+
"acc_stderr": 0.024105712607754307,
|
210 |
+
"acc_norm": 0.2774566473988439,
|
211 |
+
"acc_norm_stderr": 0.024105712607754307
|
212 |
+
},
|
213 |
+
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
+
"acc": 0.3067484662576687,
|
215 |
+
"acc_stderr": 0.036230899157241474,
|
216 |
+
"acc_norm": 0.3067484662576687,
|
217 |
+
"acc_norm_stderr": 0.036230899157241474
|
218 |
+
},
|
219 |
+
"harness|ko_mmlu_prehistory|5": {
|
220 |
+
"acc": 0.24074074074074073,
|
221 |
+
"acc_stderr": 0.023788583551658537,
|
222 |
+
"acc_norm": 0.24074074074074073,
|
223 |
+
"acc_norm_stderr": 0.023788583551658537
|
224 |
+
},
|
225 |
+
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
+
"acc": 0.26,
|
227 |
+
"acc_stderr": 0.04408440022768077,
|
228 |
+
"acc_norm": 0.26,
|
229 |
+
"acc_norm_stderr": 0.04408440022768077
|
230 |
+
},
|
231 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
+
"acc": 0.22797927461139897,
|
233 |
+
"acc_stderr": 0.030276909945178253,
|
234 |
+
"acc_norm": 0.22797927461139897,
|
235 |
+
"acc_norm_stderr": 0.030276909945178253
|
236 |
+
},
|
237 |
+
"harness|ko_mmlu_econometrics|5": {
|
238 |
+
"acc": 0.30701754385964913,
|
239 |
+
"acc_stderr": 0.043391383225798594,
|
240 |
+
"acc_norm": 0.30701754385964913,
|
241 |
+
"acc_norm_stderr": 0.043391383225798594
|
242 |
+
},
|
243 |
+
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
+
"acc": 0.3376146788990826,
|
245 |
+
"acc_stderr": 0.02027526598663891,
|
246 |
+
"acc_norm": 0.3376146788990826,
|
247 |
+
"acc_norm_stderr": 0.02027526598663891
|
248 |
+
},
|
249 |
+
"harness|ko_mmlu_formal_logic|5": {
|
250 |
+
"acc": 0.23809523809523808,
|
251 |
+
"acc_stderr": 0.038095238095238126,
|
252 |
+
"acc_norm": 0.23809523809523808,
|
253 |
+
"acc_norm_stderr": 0.038095238095238126
|
254 |
+
},
|
255 |
+
"harness|ko_mmlu_nutrition|5": {
|
256 |
+
"acc": 0.27450980392156865,
|
257 |
+
"acc_stderr": 0.025553169991826524,
|
258 |
+
"acc_norm": 0.27450980392156865,
|
259 |
+
"acc_norm_stderr": 0.025553169991826524
|
260 |
+
},
|
261 |
+
"harness|ko_mmlu_business_ethics|5": {
|
262 |
+
"acc": 0.32,
|
263 |
+
"acc_stderr": 0.04688261722621505,
|
264 |
+
"acc_norm": 0.32,
|
265 |
+
"acc_norm_stderr": 0.04688261722621505
|
266 |
+
},
|
267 |
+
"harness|ko_mmlu_international_law|5": {
|
268 |
+
"acc": 0.24793388429752067,
|
269 |
+
"acc_stderr": 0.03941897526516302,
|
270 |
+
"acc_norm": 0.24793388429752067,
|
271 |
+
"acc_norm_stderr": 0.03941897526516302
|
272 |
+
},
|
273 |
+
"harness|ko_mmlu_astronomy|5": {
|
274 |
+
"acc": 0.23684210526315788,
|
275 |
+
"acc_stderr": 0.03459777606810537,
|
276 |
+
"acc_norm": 0.23684210526315788,
|
277 |
+
"acc_norm_stderr": 0.03459777606810537
|
278 |
+
},
|
279 |
+
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
+
"acc": 0.25980392156862747,
|
281 |
+
"acc_stderr": 0.01774089950917779,
|
282 |
+
"acc_norm": 0.25980392156862747,
|
283 |
+
"acc_norm_stderr": 0.01774089950917779
|
284 |
+
},
|
285 |
+
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
+
"acc": 0.22340425531914893,
|
287 |
+
"acc_stderr": 0.024847921358063962,
|
288 |
+
"acc_norm": 0.22340425531914893,
|
289 |
+
"acc_norm_stderr": 0.024847921358063962
|
290 |
+
},
|
291 |
+
"harness|ko_mmlu_machine_learning|5": {
|
292 |
+
"acc": 0.25892857142857145,
|
293 |
+
"acc_stderr": 0.04157751539865629,
|
294 |
+
"acc_norm": 0.25892857142857145,
|
295 |
+
"acc_norm_stderr": 0.04157751539865629
|
296 |
+
},
|
297 |
+
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
+
"acc": 0.24537037037037038,
|
299 |
+
"acc_stderr": 0.029346665094372937,
|
300 |
+
"acc_norm": 0.24537037037037038,
|
301 |
+
"acc_norm_stderr": 0.029346665094372937
|
302 |
+
},
|
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.27262569832402234,
+            "acc_stderr": 0.014893391735249608,
+            "acc_norm": 0.27262569832402234,
+            "acc_norm_stderr": 0.014893391735249608
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.32,
+            "acc_stderr": 0.046882617226215034,
+            "acc_norm": 0.32,
+            "acc_norm_stderr": 0.046882617226215034
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.31,
+            "acc_stderr": 0.04648231987117316,
+            "acc_norm": 0.31,
+            "acc_norm_stderr": 0.04648231987117316
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.3786764705882353,
+            "acc_stderr": 0.02946513363977613,
+            "acc_norm": 0.3786764705882353,
+            "acc_norm_stderr": 0.02946513363977613
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.35918367346938773,
+            "acc_stderr": 0.03071356045510849,
+            "acc_norm": 0.35918367346938773,
+            "acc_norm_stderr": 0.03071356045510849
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.24472573839662448,
+            "acc_stderr": 0.027985699387036416,
+            "acc_norm": 0.24472573839662448,
+            "acc_norm_stderr": 0.027985699387036416
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.2588005215123859,
+            "acc_stderr": 0.011186109046564608,
+            "acc_norm": 0.2588005215123859,
+            "acc_norm_stderr": 0.011186109046564608
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.28431372549019607,
+            "acc_stderr": 0.031660096793998116,
+            "acc_norm": 0.28431372549019607,
+            "acc_norm_stderr": 0.031660096793998116
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.2,
+            "acc_stderr": 0.03123475237772118,
+            "acc_norm": 0.2,
+            "acc_norm_stderr": 0.03123475237772118
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.27906976744186046,
+            "mc1_stderr": 0.015702107090627887,
+            "mc2": 0.4515720476496737,
+            "mc2_stderr": 0.015493161984611252
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.2680047225501771,
+            "acc_stderr": 0.015227905796335147,
+            "acc_norm": 0.3707201889020071,
+            "acc_norm_stderr": 0.016605801289212598
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "42MARU/polyglot-ko-12.8b-instruct",
+        "model_sha": "a8354bcedc167e8e1f7dac8a347bf4b61d9c9bf0",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
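Each result file added in this commit shares the same three-part schema: a "results" map keyed by harness task (acc/acc_norm with standard errors, or mc1/mc2 for ko_truthfulqa_mc), a "versions" map, and a "config_general" block identifying the model. A minimal sketch of reading one of these backups, assuming a local copy saved as result.json (the path is hypothetical):

import json

# Load one backup result file (hypothetical local path).
with open("result.json", encoding="utf-8") as f:
    data = json.load(f)

# Every task reports acc_norm except ko_truthfulqa_mc, which reports
# mc1/mc2 instead; fall back to mc2 for that task.
scores = {
    task: metrics.get("acc_norm", metrics.get("mc2"))
    for task, metrics in data["results"].items()
}

print(data["config_general"]["model_name"])
print("mean score:", sum(scores.values()) / len(scores))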
42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.3643344709897611,
+            "acc_stderr": 0.014063260279882417,
+            "acc_norm": 0.4112627986348123,
+            "acc_norm_stderr": 0.014379441068522084
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.3732324238199562,
+            "acc_stderr": 0.004826746160830189,
+            "acc_norm": 0.4751045608444533,
+            "acc_norm_stderr": 0.004983592410934169
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.4853801169590643,
+            "acc_stderr": 0.038331852752130205,
+            "acc_norm": 0.4853801169590643,
+            "acc_norm_stderr": 0.038331852752130205
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.5631067961165048,
+            "acc_stderr": 0.04911147107365777,
+            "acc_norm": 0.5631067961165048,
+            "acc_norm_stderr": 0.04911147107365777
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.4878671775223499,
+            "acc_stderr": 0.017874698667491355,
+            "acc_norm": 0.4878671775223499,
+            "acc_norm_stderr": 0.017874698667491355
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.4148148148148148,
+            "acc_stderr": 0.042561937679014075,
+            "acc_norm": 0.4148148148148148,
+            "acc_norm_stderr": 0.042561937679014075
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.4,
+            "acc_stderr": 0.03202563076101735,
+            "acc_norm": 0.4,
+            "acc_norm_stderr": 0.03202563076101735
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.3855421686746988,
+            "acc_stderr": 0.03789134424611548,
+            "acc_norm": 0.3855421686746988,
+            "acc_norm_stderr": 0.03789134424611548
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.4533762057877814,
+            "acc_stderr": 0.028274359854894245,
+            "acc_norm": 0.4533762057877814,
+            "acc_norm_stderr": 0.028274359854894245
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.4663677130044843,
+            "acc_stderr": 0.033481800170603065,
+            "acc_norm": 0.4663677130044843,
+            "acc_norm_stderr": 0.033481800170603065
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.48091603053435117,
+            "acc_stderr": 0.04382094705550988,
+            "acc_norm": 0.48091603053435117,
+            "acc_norm_stderr": 0.04382094705550988
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.42,
+            "acc_stderr": 0.04960449637488583,
+            "acc_norm": 0.42,
+            "acc_norm_stderr": 0.04960449637488583
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.5909090909090909,
+            "acc_stderr": 0.03502975799413008,
+            "acc_norm": 0.5909090909090909,
+            "acc_norm_stderr": 0.03502975799413008
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.4413793103448276,
+            "acc_stderr": 0.04137931034482758,
+            "acc_norm": 0.4413793103448276,
+            "acc_norm_stderr": 0.04137931034482758
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.16666666666666666,
+            "acc_stderr": 0.03708284662416544,
+            "acc_norm": 0.16666666666666666,
+            "acc_norm_stderr": 0.03708284662416544
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.4495798319327731,
+            "acc_stderr": 0.03231293497137707,
+            "acc_norm": 0.4495798319327731,
+            "acc_norm_stderr": 0.03231293497137707
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.4358974358974359,
+            "acc_stderr": 0.025141801511177498,
+            "acc_norm": 0.4358974358974359,
+            "acc_norm_stderr": 0.025141801511177498
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.46,
+            "acc_stderr": 0.05009082659620333,
+            "acc_norm": 0.46,
+            "acc_norm_stderr": 0.05009082659620333
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.42,
+            "acc_stderr": 0.049604496374885836,
+            "acc_norm": 0.42,
+            "acc_norm_stderr": 0.049604496374885836
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.5277777777777778,
+            "acc_stderr": 0.048262172941398944,
+            "acc_norm": 0.5277777777777778,
+            "acc_norm_stderr": 0.048262172941398944
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.3645320197044335,
+            "acc_stderr": 0.0338640574606209,
+            "acc_norm": 0.3645320197044335,
+            "acc_norm_stderr": 0.0338640574606209
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.47096774193548385,
+            "acc_stderr": 0.028396016402761005,
+            "acc_norm": 0.47096774193548385,
+            "acc_norm_stderr": 0.028396016402761005
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.6282051282051282,
+            "acc_stderr": 0.03166098891888078,
+            "acc_norm": 0.6282051282051282,
+            "acc_norm_stderr": 0.03166098891888078
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.4528301886792453,
+            "acc_stderr": 0.030635627957961823,
+            "acc_norm": 0.4528301886792453,
+            "acc_norm_stderr": 0.030635627957961823
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.509090909090909,
+            "acc_stderr": 0.04788339768702861,
+            "acc_norm": 0.509090909090909,
+            "acc_norm_stderr": 0.04788339768702861
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.3111111111111111,
+            "acc_stderr": 0.028226446749683526,
+            "acc_norm": 0.3111111111111111,
+            "acc_norm_stderr": 0.028226446749683526
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.31788079470198677,
+            "acc_stderr": 0.038020397601079024,
+            "acc_norm": 0.31788079470198677,
+            "acc_norm_stderr": 0.038020397601079024
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.5771144278606966,
+            "acc_stderr": 0.034932317774212816,
+            "acc_norm": 0.5771144278606966,
+            "acc_norm_stderr": 0.034932317774212816
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.3815028901734104,
+            "acc_stderr": 0.03703851193099521,
+            "acc_norm": 0.3815028901734104,
+            "acc_norm_stderr": 0.03703851193099521
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.3253968253968254,
+            "acc_stderr": 0.024130158299762613,
+            "acc_norm": 0.3253968253968254,
+            "acc_norm_stderr": 0.024130158299762613
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.3819444444444444,
+            "acc_stderr": 0.040629907841466674,
+            "acc_norm": 0.3819444444444444,
+            "acc_norm_stderr": 0.040629907841466674
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.26,
+            "acc_stderr": 0.04408440022768079,
+            "acc_norm": 0.26,
+            "acc_norm_stderr": 0.04408440022768079
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.6,
+            "acc_stderr": 0.049236596391733084,
+            "acc_norm": 0.6,
+            "acc_norm_stderr": 0.049236596391733084
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.5144508670520231,
+            "acc_stderr": 0.02690784985628254,
+            "acc_norm": 0.5144508670520231,
+            "acc_norm_stderr": 0.02690784985628254
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.44171779141104295,
+            "acc_stderr": 0.03901591825836184,
+            "acc_norm": 0.44171779141104295,
+            "acc_norm_stderr": 0.03901591825836184
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.4444444444444444,
+            "acc_stderr": 0.02764847787741332,
+            "acc_norm": 0.4444444444444444,
+            "acc_norm_stderr": 0.02764847787741332
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.0479372485441102,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.0479372485441102
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.5077720207253886,
+            "acc_stderr": 0.03608003225569654,
+            "acc_norm": 0.5077720207253886,
+            "acc_norm_stderr": 0.03608003225569654
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.19298245614035087,
+            "acc_stderr": 0.037124548537213684,
+            "acc_norm": 0.19298245614035087,
+            "acc_norm_stderr": 0.037124548537213684
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.47889908256880737,
+            "acc_stderr": 0.021418224754264643,
+            "acc_norm": 0.47889908256880737,
+            "acc_norm_stderr": 0.021418224754264643
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.373015873015873,
+            "acc_stderr": 0.04325506042017086,
+            "acc_norm": 0.373015873015873,
+            "acc_norm_stderr": 0.04325506042017086
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.43790849673202614,
+            "acc_stderr": 0.028408302020332687,
+            "acc_norm": 0.43790849673202614,
+            "acc_norm_stderr": 0.028408302020332687
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.42,
+            "acc_stderr": 0.04960449637488584,
+            "acc_norm": 0.42,
+            "acc_norm_stderr": 0.04960449637488584
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.6859504132231405,
+            "acc_stderr": 0.042369647530410184,
+            "acc_norm": 0.6859504132231405,
+            "acc_norm_stderr": 0.042369647530410184
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.34868421052631576,
+            "acc_stderr": 0.03878139888797609,
+            "acc_norm": 0.34868421052631576,
+            "acc_norm_stderr": 0.03878139888797609
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.3480392156862745,
+            "acc_stderr": 0.01927099870822398,
+            "acc_norm": 0.3480392156862745,
+            "acc_norm_stderr": 0.01927099870822398
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.3475177304964539,
+            "acc_stderr": 0.028406627809590947,
+            "acc_norm": 0.3475177304964539,
+            "acc_norm_stderr": 0.028406627809590947
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.23214285714285715,
+            "acc_stderr": 0.040073418097558065,
+            "acc_norm": 0.23214285714285715,
+            "acc_norm_stderr": 0.040073418097558065
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.3101851851851852,
+            "acc_stderr": 0.031546962856566295,
+            "acc_norm": 0.3101851851851852,
+            "acc_norm_stderr": 0.031546962856566295
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.3106145251396648,
+            "acc_stderr": 0.015476515438005566,
+            "acc_norm": 0.3106145251396648,
+            "acc_norm_stderr": 0.015476515438005566
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.41,
+            "acc_stderr": 0.049431107042371025,
+            "acc_norm": 0.41,
+            "acc_norm_stderr": 0.049431107042371025
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.44,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.44,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.3235294117647059,
+            "acc_stderr": 0.02841820861940679,
+            "acc_norm": 0.3235294117647059,
+            "acc_norm_stderr": 0.02841820861940679
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.5428571428571428,
+            "acc_stderr": 0.03189141832421396,
+            "acc_norm": 0.5428571428571428,
+            "acc_norm_stderr": 0.03189141832421396
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.5780590717299579,
+            "acc_stderr": 0.032148146302403695,
+            "acc_norm": 0.5780590717299579,
+            "acc_norm_stderr": 0.032148146302403695
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.32333767926988266,
+            "acc_stderr": 0.011946565758447202,
+            "acc_norm": 0.32333767926988266,
+            "acc_norm_stderr": 0.011946565758447202
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.47549019607843135,
+            "acc_stderr": 0.035050931943487976,
+            "acc_norm": 0.47549019607843135,
+            "acc_norm_stderr": 0.035050931943487976
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.5393939393939394,
+            "acc_stderr": 0.03892207016552012,
+            "acc_norm": 0.5393939393939394,
+            "acc_norm_stderr": 0.03892207016552012
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.35006119951040393,
+            "mc1_stderr": 0.01669794942015103,
+            "mc2": 0.5148844380994511,
+            "mc2_stderr": 0.015947695748354234
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.42857142857142855,
+            "acc_stderr": 0.017014038119297473,
+            "acc_norm": 0.44155844155844154,
+            "acc_norm_stderr": 0.0170725258755631
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "42MARU/sitebunny-13b",
+        "model_sha": "15c8578d2be688d6b03ed2076658865bb8752673",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.2636518771331058,
+            "acc_stderr": 0.01287592915129705,
+            "acc_norm": 0.32593856655290104,
+            "acc_norm_stderr": 0.013697432466693242
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.3563035251941844,
+            "acc_stderr": 0.004779276329704052,
+            "acc_norm": 0.4473212507468632,
+            "acc_norm_stderr": 0.004962010338226348
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.23976608187134502,
+            "acc_stderr": 0.03274485211946956,
+            "acc_norm": 0.23976608187134502,
+            "acc_norm_stderr": 0.03274485211946956
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.1941747572815534,
+            "acc_stderr": 0.03916667762822584,
+            "acc_norm": 0.1941747572815534,
+            "acc_norm_stderr": 0.03916667762822584
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.22094508301404853,
+            "acc_stderr": 0.014836205167333574,
+            "acc_norm": 0.22094508301404853,
+            "acc_norm_stderr": 0.014836205167333574
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.18518518518518517,
+            "acc_stderr": 0.0335567721631314,
+            "acc_norm": 0.18518518518518517,
+            "acc_norm_stderr": 0.0335567721631314
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.27,
+            "acc_stderr": 0.04461960433384741,
+            "acc_norm": 0.27,
+            "acc_norm_stderr": 0.04461960433384741
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.2851063829787234,
+            "acc_stderr": 0.02951319662553935,
+            "acc_norm": 0.2851063829787234,
+            "acc_norm_stderr": 0.02951319662553935
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.3072289156626506,
+            "acc_stderr": 0.03591566797824664,
+            "acc_norm": 0.3072289156626506,
+            "acc_norm_stderr": 0.03591566797824664
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.2347266881028939,
+            "acc_stderr": 0.024071805887677045,
+            "acc_norm": 0.2347266881028939,
+            "acc_norm_stderr": 0.024071805887677045
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.2242152466367713,
+            "acc_stderr": 0.027991534258519527,
+            "acc_norm": 0.2242152466367713,
+            "acc_norm_stderr": 0.027991534258519527
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.2900763358778626,
+            "acc_stderr": 0.03980066246467765,
+            "acc_norm": 0.2900763358778626,
+            "acc_norm_stderr": 0.03980066246467765
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.28,
+            "acc_stderr": 0.04512608598542127,
+            "acc_norm": 0.28,
+            "acc_norm_stderr": 0.04512608598542127
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.2474747474747475,
+            "acc_stderr": 0.030746300742124484,
+            "acc_norm": 0.2474747474747475,
+            "acc_norm_stderr": 0.030746300742124484
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.21379310344827587,
+            "acc_stderr": 0.034165204477475494,
+            "acc_norm": 0.21379310344827587,
+            "acc_norm_stderr": 0.034165204477475494
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.22549019607843138,
+            "acc_stderr": 0.041583075330832865,
+            "acc_norm": 0.22549019607843138,
+            "acc_norm_stderr": 0.041583075330832865
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.3235294117647059,
+            "acc_stderr": 0.030388353551886835,
+            "acc_norm": 0.3235294117647059,
+            "acc_norm_stderr": 0.030388353551886835
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.36153846153846153,
+            "acc_stderr": 0.02435958146539698,
+            "acc_norm": 0.36153846153846153,
+            "acc_norm_stderr": 0.02435958146539698
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.17,
+            "acc_stderr": 0.03775251680686371,
+            "acc_norm": 0.17,
+            "acc_norm_stderr": 0.03775251680686371
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.34,
+            "acc_stderr": 0.04760952285695235,
+            "acc_norm": 0.34,
+            "acc_norm_stderr": 0.04760952285695235
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.2037037037037037,
+            "acc_stderr": 0.038935425188248475,
+            "acc_norm": 0.2037037037037037,
+            "acc_norm_stderr": 0.038935425188248475
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.270935960591133,
+            "acc_stderr": 0.031270907132976984,
+            "acc_norm": 0.270935960591133,
+            "acc_norm_stderr": 0.031270907132976984
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.2870967741935484,
+            "acc_stderr": 0.025736542745594525,
+            "acc_norm": 0.2870967741935484,
+            "acc_norm_stderr": 0.025736542745594525
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.20085470085470086,
+            "acc_stderr": 0.02624677294689047,
+            "acc_norm": 0.20085470085470086,
+            "acc_norm_stderr": 0.02624677294689047
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.25660377358490566,
+            "acc_stderr": 0.026880647889051985,
+            "acc_norm": 0.25660377358490566,
+            "acc_norm_stderr": 0.026880647889051985
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.2636363636363636,
+            "acc_stderr": 0.04220224692971987,
+            "acc_norm": 0.2636363636363636,
+            "acc_norm_stderr": 0.04220224692971987
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.24814814814814815,
+            "acc_stderr": 0.0263357394040558,
+            "acc_norm": 0.24814814814814815,
+            "acc_norm_stderr": 0.0263357394040558
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.3509933774834437,
+            "acc_stderr": 0.03896981964257374,
+            "acc_norm": 0.3509933774834437,
+            "acc_norm_stderr": 0.03896981964257374
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.2935323383084577,
+            "acc_stderr": 0.03220024104534205,
+            "acc_norm": 0.2935323383084577,
+            "acc_norm_stderr": 0.03220024104534205
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.26011560693641617,
+            "acc_stderr": 0.03345036916788991,
+            "acc_norm": 0.26011560693641617,
+            "acc_norm_stderr": 0.03345036916788991
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.24338624338624337,
+            "acc_stderr": 0.022101128787415426,
+            "acc_norm": 0.24338624338624337,
+            "acc_norm_stderr": 0.022101128787415426
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.2569444444444444,
+            "acc_stderr": 0.03653946969442099,
+            "acc_norm": 0.2569444444444444,
+            "acc_norm_stderr": 0.03653946969442099
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.26,
+            "acc_stderr": 0.0440844002276808,
+            "acc_norm": 0.26,
+            "acc_norm_stderr": 0.0440844002276808
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.25,
+            "acc_stderr": 0.04351941398892446,
+            "acc_norm": 0.25,
+            "acc_norm_stderr": 0.04351941398892446
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.2254335260115607,
+            "acc_stderr": 0.022497230190967547,
+            "acc_norm": 0.2254335260115607,
+            "acc_norm_stderr": 0.022497230190967547
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.26993865030674846,
+            "acc_stderr": 0.034878251684978906,
+            "acc_norm": 0.26993865030674846,
+            "acc_norm_stderr": 0.034878251684978906
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.28703703703703703,
+            "acc_stderr": 0.025171041915309684,
+            "acc_norm": 0.28703703703703703,
+            "acc_norm_stderr": 0.025171041915309684
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.35751295336787564,
+            "acc_stderr": 0.03458816042181006,
+            "acc_norm": 0.35751295336787564,
+            "acc_norm_stderr": 0.03458816042181006
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.30701754385964913,
+            "acc_stderr": 0.043391383225798594,
+            "acc_norm": 0.30701754385964913,
+            "acc_norm_stderr": 0.043391383225798594
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.24954128440366974,
+            "acc_stderr": 0.018553897629501614,
+            "acc_norm": 0.24954128440366974,
+            "acc_norm_stderr": 0.018553897629501614
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.373015873015873,
+            "acc_stderr": 0.04325506042017086,
+            "acc_norm": 0.373015873015873,
+            "acc_norm_stderr": 0.04325506042017086
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.24183006535947713,
+            "acc_stderr": 0.024518195641879334,
+            "acc_norm": 0.24183006535947713,
+            "acc_norm_stderr": 0.024518195641879334
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.2,
+            "acc_stderr": 0.04020151261036846,
+            "acc_norm": 0.2,
+            "acc_norm_stderr": 0.04020151261036846
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.38016528925619836,
+            "acc_stderr": 0.04431324501968432,
+            "acc_norm": 0.38016528925619836,
+            "acc_norm_stderr": 0.04431324501968432
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.21710526315789475,
+            "acc_stderr": 0.03355045304882924,
+            "acc_norm": 0.21710526315789475,
+            "acc_norm_stderr": 0.03355045304882924
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.2549019607843137,
+            "acc_stderr": 0.017630827375148383,
+            "acc_norm": 0.2549019607843137,
+            "acc_norm_stderr": 0.017630827375148383
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.26595744680851063,
+            "acc_stderr": 0.026358065698880592,
+            "acc_norm": 0.26595744680851063,
+            "acc_norm_stderr": 0.026358065698880592
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.25892857142857145,
+            "acc_stderr": 0.041577515398656284,
+            "acc_norm": 0.25892857142857145,
+            "acc_norm_stderr": 0.041577515398656284
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.4722222222222222,
+            "acc_stderr": 0.0340470532865388,
+            "acc_norm": 0.4722222222222222,
+            "acc_norm_stderr": 0.0340470532865388
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.27262569832402234,
+            "acc_stderr": 0.014893391735249608,
+            "acc_norm": 0.27262569832402234,
+            "acc_norm_stderr": 0.014893391735249608
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.31,
+            "acc_stderr": 0.04648231987117316,
+            "acc_norm": 0.31,
+            "acc_norm_stderr": 0.04648231987117316
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.23,
+            "acc_stderr": 0.04229525846816505,
+            "acc_norm": 0.23,
+            "acc_norm_stderr": 0.04229525846816505
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.4522058823529412,
+            "acc_stderr": 0.030233758551596452,
+            "acc_norm": 0.4522058823529412,
+            "acc_norm_stderr": 0.030233758551596452
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.27346938775510204,
+            "acc_stderr": 0.02853556033712845,
+            "acc_norm": 0.27346938775510204,
+            "acc_norm_stderr": 0.02853556033712845
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.26582278481012656,
+            "acc_stderr": 0.02875679962965833,
+            "acc_norm": 0.26582278481012656,
+            "acc_norm_stderr": 0.02875679962965833
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.2561929595827901,
+            "acc_stderr": 0.011149173153110583,
+            "acc_norm": 0.2561929595827901,
+            "acc_norm_stderr": 0.011149173153110583
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.24019607843137256,
+            "acc_stderr": 0.02998373305591361,
+            "acc_norm": 0.24019607843137256,
+            "acc_norm_stderr": 0.02998373305591361
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.24242424242424243,
+            "acc_stderr": 0.03346409881055953,
+            "acc_norm": 0.24242424242424243,
+            "acc_norm_stderr": 0.03346409881055953
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.24479804161566707,
+            "mc1_stderr": 0.015051869486715006,
+            "mc2": 0.40367736123530334,
+            "mc2_stderr": 0.014824402657107816
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.2396694214876033,
+            "acc_stderr": 0.014676495332267253,
+            "acc_norm": 0.36835891381345925,
+            "acc_norm_stderr": 0.016583858982639074
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "42dot/42dot_LLM-PLM-1.3B",
+        "model_sha": "a72bf57eb02cd4ea4388a344b4a5893aa95698da",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
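Because every backup shares this layout, results for different models can be compared with a short script. A sketch assuming the files are checked out locally in the <org>/<model>/result_<timestamp>.json layout used by this commit:

import glob
import json

# Rank all local backup results by ko_commongen_v2 acc_norm.
rows = []
for path in glob.glob("*/*/result_*.json"):
    with open(path, encoding="utf-8") as f:
        data = json.load(f)
    rows.append((
        data["results"]["harness|ko_commongen_v2|2"]["acc_norm"],
        data["config_general"]["model_name"],
    ))

for score, name in sorted(rows, reverse=True):
    print(f"{score:.4f}  {name}")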
42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.28242320819112626,
+            "acc_stderr": 0.01315545688409722,
+            "acc_norm": 0.35494880546075086,
+            "acc_norm_stderr": 0.013983036904094094
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.36317466640111534,
+            "acc_stderr": 0.004799317209902023,
+            "acc_norm": 0.4613622784305915,
+            "acc_norm_stderr": 0.004974860878464429
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.32748538011695905,
+            "acc_stderr": 0.035993357714560276,
+            "acc_norm": 0.32748538011695905,
+            "acc_norm_stderr": 0.035993357714560276
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.13592233009708737,
+            "acc_stderr": 0.033932957297610124,
+            "acc_norm": 0.13592233009708737,
+            "acc_norm_stderr": 0.033932957297610124
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.23754789272030652,
+            "acc_stderr": 0.015218733046150193,
+            "acc_norm": 0.23754789272030652,
+            "acc_norm_stderr": 0.015218733046150193
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.23703703703703705,
+            "acc_stderr": 0.03673731683969506,
+            "acc_norm": 0.23703703703703705,
+            "acc_norm_stderr": 0.03673731683969506
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.28,
+            "acc_stderr": 0.04512608598542127,
+            "acc_norm": 0.28,
+            "acc_norm_stderr": 0.04512608598542127
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.2723404255319149,
+            "acc_stderr": 0.029101290698386698,
+            "acc_norm": 0.2723404255319149,
+            "acc_norm_stderr": 0.029101290698386698
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.2710843373493976,
+            "acc_stderr": 0.034605799075530276,
+            "acc_norm": 0.2710843373493976,
+            "acc_norm_stderr": 0.034605799075530276
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.2604501607717042,
+            "acc_stderr": 0.024926723224845543,
+            "acc_norm": 0.2604501607717042,
+            "acc_norm_stderr": 0.024926723224845543
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.242152466367713,
+            "acc_stderr": 0.028751392398694755,
+            "acc_norm": 0.242152466367713,
+            "acc_norm_stderr": 0.028751392398694755
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.2366412213740458,
+            "acc_stderr": 0.03727673575596918,
+            "acc_norm": 0.2366412213740458,
+            "acc_norm_stderr": 0.03727673575596918
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.27,
+            "acc_stderr": 0.0446196043338474,
+            "acc_norm": 0.27,
+            "acc_norm_stderr": 0.0446196043338474
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.22727272727272727,
+            "acc_stderr": 0.02985751567338641,
+            "acc_norm": 0.22727272727272727,
+            "acc_norm_stderr": 0.02985751567338641
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.2482758620689655,
+            "acc_stderr": 0.036001056927277716,
+            "acc_norm": 0.2482758620689655,
+            "acc_norm_stderr": 0.036001056927277716
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.1568627450980392,
+            "acc_stderr": 0.03618664819936246,
+            "acc_norm": 0.1568627450980392,
+            "acc_norm_stderr": 0.03618664819936246
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.24789915966386555,
+            "acc_stderr": 0.028047967224176892,
+            "acc_norm": 0.24789915966386555,
+            "acc_norm_stderr": 0.028047967224176892
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.23076923076923078,
+            "acc_stderr": 0.021362027725222728,
+            "acc_norm": 0.23076923076923078,
+            "acc_norm_stderr": 0.021362027725222728
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.32,
+            "acc_stderr": 0.04688261722621504,
+            "acc_norm": 0.32,
+            "acc_norm_stderr": 0.04688261722621504
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.19,
+            "acc_stderr": 0.03942772444036623,
+            "acc_norm": 0.19,
+            "acc_norm_stderr": 0.03942772444036623
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.25925925925925924,
+            "acc_stderr": 0.042365112580946336,
+            "acc_norm": 0.25925925925925924,
+            "acc_norm_stderr": 0.042365112580946336
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.18719211822660098,
+            "acc_stderr": 0.027444924966882618,
+            "acc_norm": 0.18719211822660098,
+            "acc_norm_stderr": 0.027444924966882618
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.2645161290322581,
+            "acc_stderr": 0.02509189237885928,
+            "acc_norm": 0.2645161290322581,
+            "acc_norm_stderr": 0.02509189237885928
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.3034188034188034,
+            "acc_stderr": 0.030118210106942652,
+            "acc_norm": 0.3034188034188034,
+            "acc_norm_stderr": 0.030118210106942652
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.2037735849056604,
+            "acc_stderr": 0.02479078450177541,
+            "acc_norm": 0.2037735849056604,
+            "acc_norm_stderr": 0.02479078450177541
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.22727272727272727,
+            "acc_stderr": 0.04013964554072775,
+            "acc_norm": 0.22727272727272727,
+            "acc_norm_stderr": 0.04013964554072775
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.24074074074074073,
+            "acc_stderr": 0.026067159222275794,
+            "acc_norm": 0.24074074074074073,
+            "acc_norm_stderr": 0.026067159222275794
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.24503311258278146,
+            "acc_stderr": 0.035118075718047245,
+            "acc_norm": 0.24503311258278146,
+            "acc_norm_stderr": 0.035118075718047245
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.24875621890547264,
+            "acc_stderr": 0.030567675938916707,
+            "acc_norm": 0.24875621890547264,
+            "acc_norm_stderr": 0.030567675938916707
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.18497109826589594,
+            "acc_stderr": 0.029605623981771204,
+            "acc_norm": 0.18497109826589594,
+            "acc_norm_stderr": 0.029605623981771204
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.25132275132275134,
+            "acc_stderr": 0.022340482339643898,
+            "acc_norm": 0.25132275132275134,
+            "acc_norm_stderr": 0.022340482339643898
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.22916666666666666,
+            "acc_stderr": 0.035146974678623884,
+            "acc_norm": 0.22916666666666666,
+            "acc_norm_stderr": 0.035146974678623884
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.24,
+            "acc_stderr": 0.04292346959909284,
+            "acc_norm": 0.24,
+            "acc_norm_stderr": 0.04292346959909284
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.28,
+            "acc_stderr": 0.04512608598542129,
+            "acc_norm": 0.28,
+            "acc_norm_stderr": 0.04512608598542129
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.23410404624277456,
+            "acc_stderr": 0.022797110278071128,
+            "acc_norm": 0.23410404624277456,
+            "acc_norm_stderr": 0.022797110278071128
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.26380368098159507,
+            "acc_stderr": 0.03462419931615624,
+            "acc_norm": 0.26380368098159507,
+            "acc_norm_stderr": 0.03462419931615624
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.28703703703703703,
+            "acc_stderr": 0.025171041915309684,
+            "acc_norm": 0.28703703703703703,
+            "acc_norm_stderr": 0.025171041915309684
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.20725388601036268,
+            "acc_stderr": 0.029252823291803644,
+            "acc_norm": 0.20725388601036268,
+            "acc_norm_stderr": 0.029252823291803644
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.2631578947368421,
+            "acc_stderr": 0.04142439719489362,
+            "acc_norm": 0.2631578947368421,
+            "acc_norm_stderr": 0.04142439719489362
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.22201834862385322,
+            "acc_stderr": 0.017818849564796624,
+            "acc_norm": 0.22201834862385322,
+            "acc_norm_stderr": 0.017818849564796624
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.31746031746031744,
+            "acc_stderr": 0.04163453031302859,
+            "acc_norm": 0.31746031746031744,
+            "acc_norm_stderr": 0.04163453031302859
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.24836601307189543,
+            "acc_stderr": 0.024739981355113592,
+            "acc_norm": 0.24836601307189543,
+            "acc_norm_stderr": 0.024739981355113592
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.22,
+            "acc_stderr": 0.04163331998932269,
+            "acc_norm": 0.22,
+            "acc_norm_stderr": 0.04163331998932269
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.32231404958677684,
+            "acc_stderr": 0.042664163633521685,
+            "acc_norm": 0.32231404958677684,
+            "acc_norm_stderr": 0.042664163633521685
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.20394736842105263,
+            "acc_stderr": 0.0327900040631005,
+            "acc_norm": 0.20394736842105263,
+            "acc_norm_stderr": 0.0327900040631005
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.2581699346405229,
+            "acc_stderr": 0.017704531653250075,
+            "acc_norm": 0.2581699346405229,
+            "acc_norm_stderr": 0.017704531653250075
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.2375886524822695,
+            "acc_stderr": 0.025389512552729903,
+            "acc_norm": 0.2375886524822695,
+            "acc_norm_stderr": 0.025389512552729903
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.25,
+            "acc_stderr": 0.04109974682633932,
+            "acc_norm": 0.25,
+            "acc_norm_stderr": 0.04109974682633932
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.3333333333333333,
+            "acc_stderr": 0.03214952147802747,
+            "acc_norm": 0.3333333333333333,
+            "acc_norm_stderr": 0.03214952147802747
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.27262569832402234,
+            "acc_stderr": 0.014893391735249608,
+            "acc_norm": 0.27262569832402234,
+            "acc_norm_stderr": 0.014893391735249608
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.28,
+            "acc_stderr": 0.04512608598542127,
+            "acc_norm": 0.28,
+            "acc_norm_stderr": 0.04512608598542127
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.19,
+            "acc_stderr": 0.039427724440366234,
+            "acc_norm": 0.19,
+            "acc_norm_stderr": 0.039427724440366234
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.25,
+            "acc_stderr": 0.026303648393696036,
+            "acc_norm": 0.25,
+            "acc_norm_stderr": 0.026303648393696036
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.21224489795918366,
+            "acc_stderr": 0.026176967197866764,
+            "acc_norm": 0.21224489795918366,
+            "acc_norm_stderr": 0.026176967197866764
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.28270042194092826,
+            "acc_stderr": 0.029312814153955914,
+            "acc_norm": 0.28270042194092826,
+            "acc_norm_stderr": 0.029312814153955914
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.2503259452411995,
+            "acc_stderr": 0.011064151027165438,
+            "acc_norm": 0.2503259452411995,
+            "acc_norm_stderr": 0.011064151027165438
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.25,
+            "acc_stderr": 0.03039153369274154,
+            "acc_norm": 0.25,
+            "acc_norm_stderr": 0.03039153369274154
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.23030303030303031,
+            "acc_stderr": 0.03287666758603488,
+            "acc_norm": 0.23030303030303031,
+            "acc_norm_stderr": 0.03287666758603488
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.28518971848225216,
+            "mc1_stderr": 0.015805827874454895,
+            "mc2": 0.43765472485909873,
+            "mc2_stderr": 0.015405588178148114
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.2550177095631641,
+            "acc_stderr": 0.014985559533428578,
+            "acc_norm": 0.3754427390791027,
+            "acc_norm_stderr": 0.016648411589511095
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "42dot/42dot_LLM-SFT-1.3B",
|
436 |
+
"model_sha": "2dadd4492f0b27c302d8a5518003fa6045e32a8a",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
4n3mone/KoSOLAR_merge_test_v0.1/result_2024-02-21 07:42:24.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.27047781569965873,
            "acc_stderr": 0.012980954547659556,
            "acc_norm": 0.3302047781569966,
            "acc_norm_stderr": 0.013743085603760424
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.27763393746265685,
            "acc_stderr": 0.004469165728600334,
            "acc_norm": 0.31607249551882094,
            "acc_norm_stderr": 0.004639913709615934
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4269005847953216,
            "acc_stderr": 0.03793620616529916,
            "acc_norm": 0.4269005847953216,
            "acc_norm_stderr": 0.03793620616529916
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.3592233009708738,
            "acc_stderr": 0.047504583990416925,
            "acc_norm": 0.3592233009708738,
            "acc_norm_stderr": 0.047504583990416925
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.421455938697318,
            "acc_stderr": 0.017657976412654857,
            "acc_norm": 0.421455938697318,
            "acc_norm_stderr": 0.017657976412654857
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3851851851851852,
            "acc_stderr": 0.042039210401562783,
            "acc_norm": 0.3851851851851852,
            "acc_norm_stderr": 0.042039210401562783
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.28936170212765955,
            "acc_stderr": 0.02964400657700962,
            "acc_norm": 0.28936170212765955,
            "acc_norm_stderr": 0.02964400657700962
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3493975903614458,
            "acc_stderr": 0.037117251907407535,
            "acc_norm": 0.3493975903614458,
            "acc_norm_stderr": 0.037117251907407535
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4212218649517685,
            "acc_stderr": 0.028043399858210635,
            "acc_norm": 0.4212218649517685,
            "acc_norm_stderr": 0.028043399858210635
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.336322869955157,
            "acc_stderr": 0.031708824268455005,
            "acc_norm": 0.336322869955157,
            "acc_norm_stderr": 0.031708824268455005
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.31297709923664124,
            "acc_stderr": 0.04066962905677697,
            "acc_norm": 0.31297709923664124,
            "acc_norm_stderr": 0.04066962905677697
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.40404040404040403,
            "acc_stderr": 0.03496130972056128,
            "acc_norm": 0.40404040404040403,
            "acc_norm_stderr": 0.03496130972056128
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.31724137931034485,
            "acc_stderr": 0.038783523721386215,
            "acc_norm": 0.31724137931034485,
            "acc_norm_stderr": 0.038783523721386215
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.039505818611799616,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.039505818611799616
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.03038835355188685,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.03038835355188685
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24615384615384617,
            "acc_stderr": 0.021840866990423077,
            "acc_norm": 0.24615384615384617,
            "acc_norm_stderr": 0.021840866990423077
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.42,
            "acc_stderr": 0.04960449637488584,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.04960449637488584
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.04489931073591312,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.04489931073591312
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.35960591133004927,
            "acc_stderr": 0.03376458246509567,
            "acc_norm": 0.35960591133004927,
            "acc_norm_stderr": 0.03376458246509567
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.36129032258064514,
            "acc_stderr": 0.02732754844795755,
            "acc_norm": 0.36129032258064514,
            "acc_norm_stderr": 0.02732754844795755
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.44017094017094016,
            "acc_stderr": 0.032520741720630506,
            "acc_norm": 0.44017094017094016,
            "acc_norm_stderr": 0.032520741720630506
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.30943396226415093,
            "acc_stderr": 0.028450154794118627,
            "acc_norm": 0.30943396226415093,
            "acc_norm_stderr": 0.028450154794118627
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.38181818181818183,
            "acc_stderr": 0.046534298079135075,
            "acc_norm": 0.38181818181818183,
            "acc_norm_stderr": 0.046534298079135075
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.23703703703703705,
            "acc_stderr": 0.02592887613276613,
            "acc_norm": 0.23703703703703705,
            "acc_norm_stderr": 0.02592887613276613
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.1986754966887417,
            "acc_stderr": 0.032578473844367746,
            "acc_norm": 0.1986754966887417,
            "acc_norm_stderr": 0.032578473844367746
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.03333333333333336,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.03333333333333336
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2832369942196532,
            "acc_stderr": 0.03435568056047873,
            "acc_norm": 0.2832369942196532,
            "acc_norm_stderr": 0.03435568056047873
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24338624338624337,
            "acc_stderr": 0.022101128787415415,
            "acc_norm": 0.24338624338624337,
            "acc_norm_stderr": 0.022101128787415415
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.24305555555555555,
            "acc_stderr": 0.03586879280080341,
            "acc_norm": 0.24305555555555555,
            "acc_norm_stderr": 0.03586879280080341
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.33815028901734107,
            "acc_stderr": 0.02546977014940017,
            "acc_norm": 0.33815028901734107,
            "acc_norm_stderr": 0.02546977014940017
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.34355828220858897,
            "acc_stderr": 0.03731133519673893,
            "acc_norm": 0.34355828220858897,
            "acc_norm_stderr": 0.03731133519673893
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.02672586880910079,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.02672586880910079
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.31088082901554404,
            "acc_stderr": 0.03340361906276586,
            "acc_norm": 0.31088082901554404,
            "acc_norm_stderr": 0.03340361906276586
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281335,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281335
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3559633027522936,
            "acc_stderr": 0.020528559278244214,
            "acc_norm": 0.3559633027522936,
            "acc_norm_stderr": 0.020528559278244214
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1984126984126984,
            "acc_stderr": 0.03567016675276862,
            "acc_norm": 0.1984126984126984,
            "acc_norm_stderr": 0.03567016675276862
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3627450980392157,
            "acc_stderr": 0.0275300784471103,
            "acc_norm": 0.3627450980392157,
            "acc_norm_stderr": 0.0275300784471103
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.47107438016528924,
            "acc_stderr": 0.04556710331269498,
            "acc_norm": 0.47107438016528924,
            "acc_norm_stderr": 0.04556710331269498
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4144736842105263,
            "acc_stderr": 0.04008973785779205,
            "acc_norm": 0.4144736842105263,
            "acc_norm_stderr": 0.04008973785779205
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.019206606848825365,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.019206606848825365
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.25886524822695034,
            "acc_stderr": 0.026129572527180848,
            "acc_norm": 0.25886524822695034,
            "acc_norm_stderr": 0.026129572527180848
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.043270409325787296,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.043270409325787296
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.19444444444444445,
            "acc_stderr": 0.026991454502036744,
            "acc_norm": 0.19444444444444445,
            "acc_norm_stderr": 0.026991454502036744
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2346368715083799,
            "acc_stderr": 0.014173044098303673,
            "acc_norm": 0.2346368715083799,
            "acc_norm_stderr": 0.014173044098303673
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036845,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036845
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.16176470588235295,
            "acc_stderr": 0.02236867256288675,
            "acc_norm": 0.16176470588235295,
            "acc_norm_stderr": 0.02236867256288675
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2693877551020408,
            "acc_stderr": 0.02840125202902294,
            "acc_norm": 0.2693877551020408,
            "acc_norm_stderr": 0.02840125202902294
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.3459915611814346,
            "acc_stderr": 0.030964810588786713,
            "acc_norm": 0.3459915611814346,
            "acc_norm_stderr": 0.030964810588786713
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2907431551499348,
            "acc_stderr": 0.011598062372851988,
            "acc_norm": 0.2907431551499348,
            "acc_norm_stderr": 0.011598062372851988
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3088235294117647,
            "acc_stderr": 0.03242661719827218,
            "acc_norm": 0.3088235294117647,
            "acc_norm_stderr": 0.03242661719827218
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.34545454545454546,
            "acc_stderr": 0.03713158067481913,
            "acc_norm": 0.34545454545454546,
            "acc_norm_stderr": 0.03713158067481913
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29498164014687883,
            "mc1_stderr": 0.015964400965589667,
            "mc2": 0.536422656873522,
            "mc2_stderr": 0.016939809819665783
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.1641086186540732,
            "acc_stderr": 0.012733724137996926,
            "acc_norm": 0.4639905548996458,
            "acc_norm_stderr": 0.017145715365486654
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "4n3mone/KoSOLAR_merge_test_v0.1",
        "model_sha": "5bb8e2693638d22227dff2b59b1f06dbbe59cc9a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
4yo1/llama/result_2024-07-10 12:39:52.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3720136518771331,
            "acc_stderr": 0.014124597881844461,
            "acc_norm": 0.4325938566552901,
            "acc_norm_stderr": 0.014478005694182531
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3584943238398725,
            "acc_stderr": 0.004785781979354873,
            "acc_norm": 0.46564429396534557,
            "acc_norm_stderr": 0.00497798845250264
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.543859649122807,
            "acc_stderr": 0.03820042586602966,
            "acc_norm": 0.543859649122807,
            "acc_norm_stderr": 0.03820042586602966
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5728155339805825,
            "acc_stderr": 0.04897957737781168,
            "acc_norm": 0.5728155339805825,
            "acc_norm_stderr": 0.04897957737781168
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.017784034534992457,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.017784034534992457
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.362962962962963,
            "acc_stderr": 0.04153948404742399,
            "acc_norm": 0.362962962962963,
            "acc_norm_stderr": 0.04153948404742399
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4978723404255319,
            "acc_stderr": 0.032685726586674915,
            "acc_norm": 0.4978723404255319,
            "acc_norm_stderr": 0.032685726586674915
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3674698795180723,
            "acc_stderr": 0.03753267402120574,
            "acc_norm": 0.3674698795180723,
            "acc_norm_stderr": 0.03753267402120574
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5369774919614148,
            "acc_stderr": 0.028320325830105915,
            "acc_norm": 0.5369774919614148,
            "acc_norm_stderr": 0.028320325830105915
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.49327354260089684,
            "acc_stderr": 0.033554765962343545,
            "acc_norm": 0.49327354260089684,
            "acc_norm_stderr": 0.033554765962343545
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5190839694656488,
            "acc_stderr": 0.04382094705550988,
            "acc_norm": 0.5190839694656488,
            "acc_norm_stderr": 0.04382094705550988
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5202020202020202,
            "acc_stderr": 0.035594435655639196,
            "acc_norm": 0.5202020202020202,
            "acc_norm_stderr": 0.035594435655639196
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5655172413793104,
            "acc_stderr": 0.04130740879555498,
            "acc_norm": 0.5655172413793104,
            "acc_norm_stderr": 0.04130740879555498
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04690650298201942,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04690650298201942
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5294117647058824,
            "acc_stderr": 0.032422250271150053,
            "acc_norm": 0.5294117647058824,
            "acc_norm_stderr": 0.032422250271150053
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5076923076923077,
            "acc_stderr": 0.02534800603153475,
            "acc_norm": 0.5076923076923077,
            "acc_norm_stderr": 0.02534800603153475
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6111111111111112,
            "acc_stderr": 0.0471282125742677,
            "acc_norm": 0.6111111111111112,
            "acc_norm_stderr": 0.0471282125742677
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.46798029556650245,
            "acc_stderr": 0.035107665979592154,
            "acc_norm": 0.46798029556650245,
            "acc_norm_stderr": 0.035107665979592154
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.532258064516129,
            "acc_stderr": 0.028384747788813336,
            "acc_norm": 0.532258064516129,
            "acc_norm_stderr": 0.028384747788813336
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7521367521367521,
            "acc_stderr": 0.028286324075564414,
            "acc_norm": 0.7521367521367521,
            "acc_norm_stderr": 0.028286324075564414
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5320754716981132,
            "acc_stderr": 0.030709486992556552,
            "acc_norm": 0.5320754716981132,
            "acc_norm_stderr": 0.030709486992556552
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5181818181818182,
            "acc_stderr": 0.04785964010794915,
            "acc_norm": 0.5181818181818182,
            "acc_norm_stderr": 0.04785964010794915
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.37407407407407406,
            "acc_stderr": 0.02950286112895529,
            "acc_norm": 0.37407407407407406,
            "acc_norm_stderr": 0.02950286112895529
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.39072847682119205,
            "acc_stderr": 0.03983798306659809,
            "acc_norm": 0.39072847682119205,
            "acc_norm_stderr": 0.03983798306659809
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6567164179104478,
            "acc_stderr": 0.03357379665433431,
            "acc_norm": 0.6567164179104478,
            "acc_norm_stderr": 0.03357379665433431
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4508670520231214,
            "acc_stderr": 0.03794012674697029,
            "acc_norm": 0.4508670520231214,
            "acc_norm_stderr": 0.03794012674697029
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3835978835978836,
            "acc_stderr": 0.025043757318520203,
            "acc_norm": 0.3835978835978836,
            "acc_norm_stderr": 0.025043757318520203
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4861111111111111,
            "acc_stderr": 0.04179596617581,
            "acc_norm": 0.4861111111111111,
            "acc_norm_stderr": 0.04179596617581
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.69,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.69,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5404624277456648,
            "acc_stderr": 0.02683080599895224,
            "acc_norm": 0.5404624277456648,
            "acc_norm_stderr": 0.02683080599895224
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.43558282208588955,
            "acc_stderr": 0.03895632464138936,
            "acc_norm": 0.43558282208588955,
            "acc_norm_stderr": 0.03895632464138936
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5246913580246914,
            "acc_stderr": 0.027786800931427443,
            "acc_norm": 0.5246913580246914,
            "acc_norm_stderr": 0.027786800931427443
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.047609522856952344,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.047609522856952344
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5647668393782384,
            "acc_stderr": 0.03578038165008585,
            "acc_norm": 0.5647668393782384,
            "acc_norm_stderr": 0.03578038165008585
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.37719298245614036,
            "acc_stderr": 0.04559522141958216,
            "acc_norm": 0.37719298245614036,
            "acc_norm_stderr": 0.04559522141958216
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5688073394495413,
            "acc_stderr": 0.021233365030319567,
            "acc_norm": 0.5688073394495413,
            "acc_norm_stderr": 0.021233365030319567
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.044444444444444495,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.044444444444444495
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5294117647058824,
            "acc_stderr": 0.028580341065138293,
            "acc_norm": 0.5294117647058824,
            "acc_norm_stderr": 0.028580341065138293
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.62,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7024793388429752,
            "acc_stderr": 0.04173349148083498,
            "acc_norm": 0.7024793388429752,
            "acc_norm_stderr": 0.04173349148083498
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.46710526315789475,
            "acc_stderr": 0.040601270352363966,
            "acc_norm": 0.46710526315789475,
            "acc_norm_stderr": 0.040601270352363966
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.44607843137254904,
            "acc_stderr": 0.02010986454718136,
            "acc_norm": 0.44607843137254904,
            "acc_norm_stderr": 0.02010986454718136
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.34397163120567376,
            "acc_stderr": 0.028338017428611324,
            "acc_norm": 0.34397163120567376,
            "acc_norm_stderr": 0.028338017428611324
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.44642857142857145,
            "acc_stderr": 0.047184714852195886,
            "acc_norm": 0.44642857142857145,
            "acc_norm_stderr": 0.047184714852195886
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.03350991604696044,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.03350991604696044
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.3027932960893855,
            "acc_stderr": 0.015366860386397114,
            "acc_norm": 0.3027932960893855,
            "acc_norm_stderr": 0.015366860386397114
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.7,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3860294117647059,
            "acc_stderr": 0.029573269134411124,
            "acc_norm": 0.3860294117647059,
            "acc_norm_stderr": 0.029573269134411124
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6326530612244898,
            "acc_stderr": 0.03086214492108756,
            "acc_norm": 0.6326530612244898,
            "acc_norm_stderr": 0.03086214492108756
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6413502109704642,
            "acc_stderr": 0.03121956944530185,
            "acc_norm": 0.6413502109704642,
            "acc_norm_stderr": 0.03121956944530185
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.36897001303780963,
            "acc_stderr": 0.012323936650174859,
            "acc_norm": 0.36897001303780963,
            "acc_norm_stderr": 0.012323936650174859
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5392156862745098,
            "acc_stderr": 0.03498501649369527,
            "acc_norm": 0.5392156862745098,
            "acc_norm_stderr": 0.03498501649369527
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6545454545454545,
            "acc_stderr": 0.037131580674819135,
            "acc_norm": 0.6545454545454545,
            "acc_norm_stderr": 0.037131580674819135
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.31334149326805383,
            "mc1_stderr": 0.016238065069059622,
            "mc2": 0.48760282451527875,
            "mc2_stderr": 0.01585626318171357
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4781582054309327,
            "acc_stderr": 0.01717394447429438,
            "acc_norm": 0.5076741440377804,
            "acc_norm_stderr": 0.017188329219654273
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "4yo1/llama",
        "model_sha": "2f7568051dbd3185c834cac527616af8378f9206",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
4yo1/llama3-eng-ko-8-llama/result_2024-07-02 09:08:08.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.25341296928327645,
            "acc_stderr": 0.012710896778378602,
            "acc_norm": 0.31313993174061433,
            "acc_norm_stderr": 0.013552671543623492
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.32812188807010556,
            "acc_stderr": 0.004685698752104808,
            "acc_norm": 0.40928101971718783,
            "acc_norm_stderr": 0.004906962980328288
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.037867207062342145,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.037867207062342145
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2912621359223301,
            "acc_stderr": 0.044986763205729224,
            "acc_norm": 0.2912621359223301,
            "acc_norm_stderr": 0.044986763205729224
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3614303959131545,
            "acc_stderr": 0.01717960132890074,
            "acc_norm": 0.3614303959131545,
            "acc_norm_stderr": 0.01717960132890074
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.0391545063041425,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.0391545063041425
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3659574468085106,
            "acc_stderr": 0.0314895582974553,
            "acc_norm": 0.3659574468085106,
            "acc_norm_stderr": 0.0314895582974553
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.28313253012048195,
            "acc_stderr": 0.03507295431370518,
            "acc_norm": 0.28313253012048195,
            "acc_norm_stderr": 0.03507295431370518
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.33762057877813506,
            "acc_stderr": 0.026858825879488544,
            "acc_norm": 0.33762057877813506,
            "acc_norm_stderr": 0.026858825879488544
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.34977578475336324,
            "acc_stderr": 0.03200736719484503,
            "acc_norm": 0.34977578475336324,
            "acc_norm_stderr": 0.03200736719484503
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.32061068702290074,
            "acc_stderr": 0.04093329229834278,
            "acc_norm": 0.32061068702290074,
            "acc_norm_stderr": 0.04093329229834278
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3383838383838384,
            "acc_stderr": 0.03371124142626303,
            "acc_norm": 0.3383838383838384,
            "acc_norm_stderr": 0.03371124142626303
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5379310344827586,
            "acc_stderr": 0.04154659671707548,
            "acc_norm": 0.5379310344827586,
            "acc_norm_stderr": 0.04154659671707548
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617749,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617749
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.37815126050420167,
            "acc_stderr": 0.031499305777849054,
            "acc_norm": 0.37815126050420167,
            "acc_norm_stderr": 0.031499305777849054
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.31794871794871793,
            "acc_stderr": 0.02361088430892786,
            "acc_norm": 0.31794871794871793,
            "acc_norm_stderr": 0.02361088430892786
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.39814814814814814,
            "acc_stderr": 0.04732332615978815,
            "acc_norm": 0.39814814814814814,
            "acc_norm_stderr": 0.04732332615978815
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3399014778325123,
            "acc_stderr": 0.033327690684107895,
            "acc_norm": 0.3399014778325123,
            "acc_norm_stderr": 0.033327690684107895
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.38387096774193546,
            "acc_stderr": 0.027666182075539635,
            "acc_norm": 0.38387096774193546,
            "acc_norm_stderr": 0.027666182075539635
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5598290598290598,
            "acc_stderr": 0.032520741720630506,
            "acc_norm": 0.5598290598290598,
            "acc_norm_stderr": 0.032520741720630506
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.39622641509433965,
            "acc_stderr": 0.03010279378179119,
            "acc_norm": 0.39622641509433965,
            "acc_norm_stderr": 0.03010279378179119
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34444444444444444,
            "acc_stderr": 0.028972648884844267,
            "acc_norm": 0.34444444444444444,
            "acc_norm_stderr": 0.028972648884844267
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.40298507462686567,
            "acc_stderr": 0.034683432951111266,
            "acc_norm": 0.40298507462686567,
            "acc_norm_stderr": 0.034683432951111266
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.31213872832369943,
            "acc_stderr": 0.035331333893236574,
            "acc_norm": 0.31213872832369943,
            "acc_norm_stderr": 0.035331333893236574
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.024130158299762613,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.024130158299762613
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2916666666666667,
            "acc_stderr": 0.03800968060554858,
            "acc_norm": 0.2916666666666667,
            "acc_norm_stderr": 0.03800968060554858
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3352601156069364,
            "acc_stderr": 0.025416003773165555,
            "acc_norm": 0.3352601156069364,
            "acc_norm_stderr": 0.025416003773165555
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3558282208588957,
            "acc_stderr": 0.03761521380046734,
            "acc_norm": 0.3558282208588957,
            "acc_norm_stderr": 0.03761521380046734
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3425925925925926,
            "acc_stderr": 0.026406145973625672,
            "acc_norm": 0.3425925925925926,
            "acc_norm_stderr": 0.026406145973625672
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.35751295336787564,
            "acc_stderr": 0.03458816042181006,
            "acc_norm": 0.35751295336787564,
            "acc_norm_stderr": 0.03458816042181006
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3467889908256881,
            "acc_stderr": 0.020406097104093024,
            "acc_norm": 0.3467889908256881,
            "acc_norm_stderr": 0.020406097104093024
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235172,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235172
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.38562091503267976,
            "acc_stderr": 0.027870745278290313,
            "acc_norm": 0.38562091503267976,
            "acc_norm_stderr": 0.027870745278290313
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.49586776859504134,
            "acc_stderr": 0.04564198767432754,
            "acc_norm": 0.49586776859504134,
            "acc_norm_stderr": 0.04564198767432754
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2565789473684211,
            "acc_stderr": 0.0355418036802569,
            "acc_norm": 0.2565789473684211,
            "acc_norm_stderr": 0.0355418036802569
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.31209150326797386,
            "acc_stderr": 0.018745011201277657,
            "acc_norm": 0.31209150326797386,
            "acc_norm_stderr": 0.018745011201277657
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2695035460992908,
            "acc_stderr": 0.026469036818590634,
            "acc_norm": 0.2695035460992908,
            "acc_norm_stderr": 0.026469036818590634
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04547960999764376,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04547960999764376
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3101851851851852,
            "acc_stderr": 0.031546962856566295,
            "acc_norm": 0.3101851851851852,
            "acc_norm_stderr": 0.031546962856566295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23687150837988827,
            "acc_stderr": 0.014219570788103982,
            "acc_norm": 0.23687150837988827,
            "acc_norm_stderr": 0.014219570788103982
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3382352941176471,
            "acc_stderr": 0.028739328513983583,
            "acc_norm": 0.3382352941176471,
            "acc_norm_stderr": 0.028739328513983583
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.21224489795918366,
            "acc_stderr": 0.026176967197866764,
            "acc_norm": 0.21224489795918366,
            "acc_norm_stderr": 0.026176967197866764
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4430379746835443,
            "acc_stderr": 0.03233532777533484,
            "acc_norm": 0.4430379746835443,
            "acc_norm_stderr": 0.03233532777533484
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2803129074315515,
            "acc_stderr": 0.01147155594495862,
            "acc_norm": 0.2803129074315515,
            "acc_norm_stderr": 0.01147155594495862
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.31862745098039214,
            "acc_stderr": 0.0327028718148208,
            "acc_norm": 0.31862745098039214,
            "acc_norm_stderr": 0.0327028718148208
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3151515151515151,
            "acc_stderr": 0.0362773057502241,
            "acc_norm": 0.3151515151515151,
            "acc_norm_stderr": 0.0362773057502241
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2913096695226438,
            "mc1_stderr": 0.01590598704818483,
            "mc2": 0.44739358713372257,
            "mc2_stderr": 0.015551996419791592
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2987012987012987,
            "acc_stderr": 0.01573565739143828,
            "acc_norm": 0.4085005903187721,
            "acc_norm_stderr": 0.01690006287942712
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "4yo1/llama3-eng-ko-8-llama",
|
436 |
+
"model_sha": "e2c5c277fae6b14e4a8f6c45075f0518199d3e95",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
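Each of the added files follows the same lighteval layout: a "results" map keyed by harness task (acc/acc_norm pairs, or mc1/mc2 for ko_truthfulqa_mc), a "versions" map, and a "config_general" block. A minimal Python sketch of reading one of these backup files, assuming a local copy at the path shown; the macro-average below is illustrative, not the leaderboard's own aggregation:

import json
from statistics import mean

# Hypothetical local copy of one of the added result files.
path = "4yo1/llama3-eng-ko-8/result_2024-07-02 08:52:49.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Illustrative macro-average of acc_norm over the 5-shot ko_mmlu subtasks.
mmlu = [v["acc_norm"] for k, v in results.items() if k.startswith("harness|ko_mmlu")]
print(f"ko_mmlu macro acc_norm: {mean(mmlu):.4f}")

# ko_truthfulqa_mc reports mc1/mc2 rather than acc/acc_norm.
print(f"ko_truthfulqa mc2: {results['harness|ko_truthfulqa_mc|0']['mc2']:.4f}")
print(f"model: {data['config_general']['model_name']}")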
4yo1/llama3-eng-ko-8/result_2024-07-02 08:52:49.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.25341296928327645,
            "acc_stderr": 0.012710896778378602,
            "acc_norm": 0.31313993174061433,
            "acc_norm_stderr": 0.013552671543623492
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.32812188807010556,
            "acc_stderr": 0.004685698752104808,
            "acc_norm": 0.40928101971718783,
            "acc_norm_stderr": 0.004906962980328288
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.037867207062342145,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.037867207062342145
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2912621359223301,
            "acc_stderr": 0.044986763205729224,
            "acc_norm": 0.2912621359223301,
            "acc_norm_stderr": 0.044986763205729224
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3614303959131545,
            "acc_stderr": 0.01717960132890074,
            "acc_norm": 0.3614303959131545,
            "acc_norm_stderr": 0.01717960132890074
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.0391545063041425,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.0391545063041425
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3659574468085106,
            "acc_stderr": 0.0314895582974553,
            "acc_norm": 0.3659574468085106,
            "acc_norm_stderr": 0.0314895582974553
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.28313253012048195,
            "acc_stderr": 0.03507295431370518,
            "acc_norm": 0.28313253012048195,
            "acc_norm_stderr": 0.03507295431370518
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.33762057877813506,
            "acc_stderr": 0.026858825879488544,
            "acc_norm": 0.33762057877813506,
            "acc_norm_stderr": 0.026858825879488544
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.34977578475336324,
            "acc_stderr": 0.03200736719484503,
            "acc_norm": 0.34977578475336324,
            "acc_norm_stderr": 0.03200736719484503
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.32061068702290074,
            "acc_stderr": 0.04093329229834278,
            "acc_norm": 0.32061068702290074,
            "acc_norm_stderr": 0.04093329229834278
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3383838383838384,
            "acc_stderr": 0.03371124142626303,
            "acc_norm": 0.3383838383838384,
            "acc_norm_stderr": 0.03371124142626303
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5379310344827586,
            "acc_stderr": 0.04154659671707548,
            "acc_norm": 0.5379310344827586,
            "acc_norm_stderr": 0.04154659671707548
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617749,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617749
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.37815126050420167,
            "acc_stderr": 0.031499305777849054,
            "acc_norm": 0.37815126050420167,
            "acc_norm_stderr": 0.031499305777849054
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.31794871794871793,
            "acc_stderr": 0.02361088430892786,
            "acc_norm": 0.31794871794871793,
            "acc_norm_stderr": 0.02361088430892786
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.39814814814814814,
            "acc_stderr": 0.04732332615978815,
            "acc_norm": 0.39814814814814814,
            "acc_norm_stderr": 0.04732332615978815
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3399014778325123,
            "acc_stderr": 0.033327690684107895,
            "acc_norm": 0.3399014778325123,
            "acc_norm_stderr": 0.033327690684107895
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.38387096774193546,
            "acc_stderr": 0.027666182075539635,
            "acc_norm": 0.38387096774193546,
            "acc_norm_stderr": 0.027666182075539635
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5598290598290598,
            "acc_stderr": 0.032520741720630506,
            "acc_norm": 0.5598290598290598,
            "acc_norm_stderr": 0.032520741720630506
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.39622641509433965,
            "acc_stderr": 0.03010279378179119,
            "acc_norm": 0.39622641509433965,
            "acc_norm_stderr": 0.03010279378179119
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34444444444444444,
            "acc_stderr": 0.028972648884844267,
            "acc_norm": 0.34444444444444444,
            "acc_norm_stderr": 0.028972648884844267
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.40298507462686567,
            "acc_stderr": 0.034683432951111266,
            "acc_norm": 0.40298507462686567,
            "acc_norm_stderr": 0.034683432951111266
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.31213872832369943,
            "acc_stderr": 0.035331333893236574,
            "acc_norm": 0.31213872832369943,
            "acc_norm_stderr": 0.035331333893236574
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.024130158299762613,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.024130158299762613
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2916666666666667,
            "acc_stderr": 0.03800968060554858,
            "acc_norm": 0.2916666666666667,
            "acc_norm_stderr": 0.03800968060554858
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3352601156069364,
            "acc_stderr": 0.025416003773165555,
            "acc_norm": 0.3352601156069364,
            "acc_norm_stderr": 0.025416003773165555
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3558282208588957,
            "acc_stderr": 0.03761521380046734,
            "acc_norm": 0.3558282208588957,
            "acc_norm_stderr": 0.03761521380046734
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3425925925925926,
            "acc_stderr": 0.026406145973625672,
            "acc_norm": 0.3425925925925926,
            "acc_norm_stderr": 0.026406145973625672
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.35751295336787564,
            "acc_stderr": 0.03458816042181006,
            "acc_norm": 0.35751295336787564,
            "acc_norm_stderr": 0.03458816042181006
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3467889908256881,
            "acc_stderr": 0.020406097104093024,
            "acc_norm": 0.3467889908256881,
            "acc_norm_stderr": 0.020406097104093024
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235172,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235172
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.38562091503267976,
            "acc_stderr": 0.027870745278290313,
            "acc_norm": 0.38562091503267976,
            "acc_norm_stderr": 0.027870745278290313
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.49586776859504134,
            "acc_stderr": 0.04564198767432754,
            "acc_norm": 0.49586776859504134,
            "acc_norm_stderr": 0.04564198767432754
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2565789473684211,
            "acc_stderr": 0.0355418036802569,
            "acc_norm": 0.2565789473684211,
            "acc_norm_stderr": 0.0355418036802569
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.31209150326797386,
            "acc_stderr": 0.018745011201277657,
            "acc_norm": 0.31209150326797386,
            "acc_norm_stderr": 0.018745011201277657
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2695035460992908,
            "acc_stderr": 0.026469036818590634,
            "acc_norm": 0.2695035460992908,
            "acc_norm_stderr": 0.026469036818590634
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04547960999764376,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04547960999764376
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3101851851851852,
            "acc_stderr": 0.031546962856566295,
            "acc_norm": 0.3101851851851852,
            "acc_norm_stderr": 0.031546962856566295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23687150837988827,
            "acc_stderr": 0.014219570788103982,
            "acc_norm": 0.23687150837988827,
            "acc_norm_stderr": 0.014219570788103982
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3382352941176471,
            "acc_stderr": 0.028739328513983583,
            "acc_norm": 0.3382352941176471,
            "acc_norm_stderr": 0.028739328513983583
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.21224489795918366,
            "acc_stderr": 0.026176967197866764,
            "acc_norm": 0.21224489795918366,
            "acc_norm_stderr": 0.026176967197866764
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4430379746835443,
            "acc_stderr": 0.03233532777533484,
            "acc_norm": 0.4430379746835443,
            "acc_norm_stderr": 0.03233532777533484
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2803129074315515,
            "acc_stderr": 0.01147155594495862,
            "acc_norm": 0.2803129074315515,
            "acc_norm_stderr": 0.01147155594495862
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.31862745098039214,
            "acc_stderr": 0.0327028718148208,
            "acc_norm": 0.31862745098039214,
            "acc_norm_stderr": 0.0327028718148208
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3151515151515151,
            "acc_stderr": 0.0362773057502241,
            "acc_norm": 0.3151515151515151,
            "acc_norm_stderr": 0.0362773057502241
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2913096695226438,
            "mc1_stderr": 0.01590598704818483,
            "mc2": 0.44739358713372257,
            "mc2_stderr": 0.015551996419791592
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2987012987012987,
            "acc_stderr": 0.01573565739143828,
            "acc_norm": 0.4085005903187721,
            "acc_norm_stderr": 0.01690006287942712
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "4yo1/llama3-eng-ko-8",
        "model_sha": "e2c5c277fae6b14e4a8f6c45075f0518199d3e95",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
4yo1/llama3-eng-ko-8/result_2024-07-02 08:54:35.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2525597269624573,
            "acc_stderr": 0.012696728980207708,
            "acc_norm": 0.31399317406143346,
            "acc_norm_stderr": 0.013562691224726304
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.32752439753037244,
            "acc_stderr": 0.004683511716552236,
            "acc_norm": 0.40967934674367656,
            "acc_norm_stderr": 0.0049076947279356915
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.037867207062342145,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.037867207062342145
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2912621359223301,
            "acc_stderr": 0.044986763205729224,
            "acc_norm": 0.2912621359223301,
            "acc_norm_stderr": 0.044986763205729224
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.36015325670498083,
            "acc_stderr": 0.0171663624713693,
            "acc_norm": 0.36015325670498083,
            "acc_norm_stderr": 0.0171663624713693
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.0391545063041425,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.0391545063041425
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3659574468085106,
            "acc_stderr": 0.0314895582974553,
            "acc_norm": 0.3659574468085106,
            "acc_norm_stderr": 0.0314895582974553
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.28313253012048195,
            "acc_stderr": 0.03507295431370518,
            "acc_norm": 0.28313253012048195,
            "acc_norm_stderr": 0.03507295431370518
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.33762057877813506,
            "acc_stderr": 0.026858825879488544,
            "acc_norm": 0.33762057877813506,
            "acc_norm_stderr": 0.026858825879488544
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.34977578475336324,
            "acc_stderr": 0.03200736719484503,
            "acc_norm": 0.34977578475336324,
            "acc_norm_stderr": 0.03200736719484503
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.32061068702290074,
            "acc_stderr": 0.04093329229834278,
            "acc_norm": 0.32061068702290074,
            "acc_norm_stderr": 0.04093329229834278
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3383838383838384,
            "acc_stderr": 0.03371124142626303,
            "acc_norm": 0.3383838383838384,
            "acc_norm_stderr": 0.03371124142626303
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5379310344827586,
            "acc_stderr": 0.04154659671707548,
            "acc_norm": 0.5379310344827586,
            "acc_norm_stderr": 0.04154659671707548
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617749,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617749
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.37815126050420167,
            "acc_stderr": 0.031499305777849054,
            "acc_norm": 0.37815126050420167,
            "acc_norm_stderr": 0.031499305777849054
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3153846153846154,
            "acc_stderr": 0.02355964698318995,
            "acc_norm": 0.3153846153846154,
            "acc_norm_stderr": 0.02355964698318995
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.39814814814814814,
            "acc_stderr": 0.04732332615978815,
            "acc_norm": 0.39814814814814814,
            "acc_norm_stderr": 0.04732332615978815
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3399014778325123,
            "acc_stderr": 0.033327690684107895,
            "acc_norm": 0.3399014778325123,
            "acc_norm_stderr": 0.033327690684107895
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.38387096774193546,
            "acc_stderr": 0.027666182075539635,
            "acc_norm": 0.38387096774193546,
            "acc_norm_stderr": 0.027666182075539635
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5598290598290598,
            "acc_stderr": 0.032520741720630506,
            "acc_norm": 0.5598290598290598,
            "acc_norm_stderr": 0.032520741720630506
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.39622641509433965,
            "acc_stderr": 0.03010279378179119,
            "acc_norm": 0.39622641509433965,
            "acc_norm_stderr": 0.03010279378179119
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34814814814814815,
            "acc_stderr": 0.02904560029061626,
            "acc_norm": 0.34814814814814815,
            "acc_norm_stderr": 0.02904560029061626
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.40298507462686567,
            "acc_stderr": 0.034683432951111266,
            "acc_norm": 0.40298507462686567,
            "acc_norm_stderr": 0.034683432951111266
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.31213872832369943,
            "acc_stderr": 0.035331333893236574,
            "acc_norm": 0.31213872832369943,
            "acc_norm_stderr": 0.035331333893236574
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.024130158299762613,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.024130158299762613
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2847222222222222,
            "acc_stderr": 0.03773809990686935,
            "acc_norm": 0.2847222222222222,
            "acc_norm_stderr": 0.03773809990686935
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3352601156069364,
            "acc_stderr": 0.025416003773165555,
            "acc_norm": 0.3352601156069364,
            "acc_norm_stderr": 0.025416003773165555
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3558282208588957,
            "acc_stderr": 0.03761521380046734,
            "acc_norm": 0.3558282208588957,
            "acc_norm_stderr": 0.03761521380046734
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3425925925925926,
            "acc_stderr": 0.026406145973625672,
            "acc_norm": 0.3425925925925926,
            "acc_norm_stderr": 0.026406145973625672
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.3626943005181347,
            "acc_stderr": 0.03469713791704372,
            "acc_norm": 0.3626943005181347,
            "acc_norm_stderr": 0.03469713791704372
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3486238532110092,
            "acc_stderr": 0.020431254090714317,
            "acc_norm": 0.3486238532110092,
            "acc_norm_stderr": 0.020431254090714317
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235172,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235172
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.02782610930728369,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.02782610930728369
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5041322314049587,
            "acc_stderr": 0.045641987674327526,
            "acc_norm": 0.5041322314049587,
            "acc_norm_stderr": 0.045641987674327526
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2565789473684211,
            "acc_stderr": 0.0355418036802569,
            "acc_norm": 0.2565789473684211,
            "acc_norm_stderr": 0.0355418036802569
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.018771683893528186,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.018771683893528186
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.26595744680851063,
            "acc_stderr": 0.026358065698880592,
            "acc_norm": 0.26595744680851063,
            "acc_norm_stderr": 0.026358065698880592
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.36607142857142855,
            "acc_stderr": 0.045723723587374296,
            "acc_norm": 0.36607142857142855,
            "acc_norm_stderr": 0.045723723587374296
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3101851851851852,
            "acc_stderr": 0.031546962856566295,
            "acc_norm": 0.3101851851851852,
            "acc_norm_stderr": 0.031546962856566295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23687150837988827,
            "acc_stderr": 0.014219570788103982,
            "acc_norm": 0.23687150837988827,
            "acc_norm_stderr": 0.014219570788103982
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.04960449637488584,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.04960449637488584
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3382352941176471,
            "acc_stderr": 0.028739328513983583,
            "acc_norm": 0.3382352941176471,
            "acc_norm_stderr": 0.028739328513983583
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.20816326530612245,
            "acc_stderr": 0.025991117672813296,
            "acc_norm": 0.20816326530612245,
            "acc_norm_stderr": 0.025991117672813296
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4472573839662447,
            "acc_stderr": 0.03236564251614192,
            "acc_norm": 0.4472573839662447,
            "acc_norm_stderr": 0.03236564251614192
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2790091264667536,
            "acc_stderr": 0.011455208832803529,
            "acc_norm": 0.2790091264667536,
            "acc_norm_stderr": 0.011455208832803529
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.31862745098039214,
            "acc_stderr": 0.0327028718148208,
            "acc_norm": 0.31862745098039214,
            "acc_norm_stderr": 0.0327028718148208
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3151515151515151,
            "acc_stderr": 0.0362773057502241,
            "acc_norm": 0.3151515151515151,
            "acc_norm_stderr": 0.0362773057502241
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2913096695226438,
            "mc1_stderr": 0.01590598704818483,
            "mc2": 0.44742099912433764,
            "mc2_stderr": 0.015551700567433569
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2987012987012987,
            "acc_stderr": 0.01573565739143828,
            "acc_norm": 0.4085005903187721,
            "acc_norm_stderr": 0.01690006287942712
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "4yo1/llama3-eng-ko-8",
        "model_sha": "e2c5c277fae6b14e4a8f6c45075f0518199d3e95",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
4yo1/llama3-eng-ko-8b-sl/result_2024-07-03 13:25:55.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.31569965870307165,
            "acc_stderr": 0.013582571095815291,
            "acc_norm": 0.3720136518771331,
            "acc_norm_stderr": 0.014124597881844453
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36317466640111534,
            "acc_stderr": 0.004799317209902019,
            "acc_norm": 0.46574387572196774,
            "acc_norm_stderr": 0.004978056798794869
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47953216374269003,
            "acc_stderr": 0.038316105328219316,
            "acc_norm": 0.47953216374269003,
            "acc_norm_stderr": 0.038316105328219316
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5242718446601942,
            "acc_stderr": 0.049449010929737795,
            "acc_norm": 0.5242718446601942,
            "acc_norm_stderr": 0.049449010929737795
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.39208173690932313,
            "acc_stderr": 0.017458524050147643,
            "acc_norm": 0.39208173690932313,
            "acc_norm_stderr": 0.017458524050147643
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3925925925925926,
            "acc_stderr": 0.04218506215368879,
            "acc_norm": 0.3925925925925926,
            "acc_norm_stderr": 0.04218506215368879
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036844,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036844
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3829787234042553,
            "acc_stderr": 0.03177821250236922,
            "acc_norm": 0.3829787234042553,
            "acc_norm_stderr": 0.03177821250236922
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.39156626506024095,
            "acc_stderr": 0.037998574544796354,
            "acc_norm": 0.39156626506024095,
            "acc_norm_stderr": 0.037998574544796354
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.47266881028938906,
            "acc_stderr": 0.028355633568328188,
            "acc_norm": 0.47266881028938906,
            "acc_norm_stderr": 0.028355633568328188
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4349775784753363,
            "acc_stderr": 0.033272833702713445,
            "acc_norm": 0.4349775784753363,
            "acc_norm_stderr": 0.033272833702713445
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48091603053435117,
            "acc_stderr": 0.04382094705550988,
            "acc_norm": 0.48091603053435117,
            "acc_norm_stderr": 0.04382094705550988
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.43434343434343436,
            "acc_stderr": 0.03531505879359183,
            "acc_norm": 0.43434343434343436,
            "acc_norm_stderr": 0.03531505879359183
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5241379310344828,
            "acc_stderr": 0.041618085035015295,
            "acc_norm": 0.5241379310344828,
            "acc_norm_stderr": 0.041618085035015295
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.04724007352383888,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.04724007352383888
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4579831932773109,
            "acc_stderr": 0.032363611119519416,
            "acc_norm": 0.4579831932773109,
            "acc_norm_stderr": 0.032363611119519416
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4717948717948718,
            "acc_stderr": 0.025310639254933927,
            "acc_norm": 0.4717948717948718,
            "acc_norm_stderr": 0.025310639254933927
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.56,
            "acc_stderr": 0.0498887651569859,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.0498887651569859
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4187192118226601,
            "acc_stderr": 0.034711928605184676,
            "acc_norm": 0.4187192118226601,
            "acc_norm_stderr": 0.034711928605184676
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.47419354838709676,
            "acc_stderr": 0.02840609505765332,
            "acc_norm": 0.47419354838709676,
            "acc_norm_stderr": 0.02840609505765332
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6324786324786325,
            "acc_stderr": 0.03158539157745636,
            "acc_norm": 0.6324786324786325,
            "acc_norm_stderr": 0.03158539157745636
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.42641509433962266,
            "acc_stderr": 0.030437794342983045,
            "acc_norm": 0.42641509433962266,
            "acc_norm_stderr": 0.030437794342983045
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.44545454545454544,
            "acc_stderr": 0.047605488214603246,
            "acc_norm": 0.44545454545454544,
            "acc_norm_stderr": 0.047605488214603246
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.029185714949857392,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.029185714949857392
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3576158940397351,
            "acc_stderr": 0.03913453431177258,
            "acc_norm": 0.3576158940397351,
            "acc_norm_stderr": 0.03913453431177258
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5572139303482587,
            "acc_stderr": 0.03512310964123935,
            "acc_norm": 0.5572139303482587,
            "acc_norm_stderr": 0.03512310964123935
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4508670520231214,
            "acc_stderr": 0.03794012674697029,
            "acc_norm": 0.4508670520231214,
            "acc_norm_stderr": 0.03794012674697029
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.36772486772486773,
            "acc_stderr": 0.02483383982556242,
            "acc_norm": 0.36772486772486773,
            "acc_norm_stderr": 0.02483383982556242
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3819444444444444,
            "acc_stderr": 0.040629907841466674,
            "acc_norm": 0.3819444444444444,
            "acc_norm_stderr": 0.040629907841466674
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.62,
            "acc_stderr": 0.04878317312145633,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.04878317312145633
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.476878612716763,
            "acc_stderr": 0.026890297881303125,
            "acc_norm": 0.476878612716763,
            "acc_norm_stderr": 0.026890297881303125
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.34355828220858897,
            "acc_stderr": 0.037311335196738925,
            "acc_norm": 0.34355828220858897,
            "acc_norm_stderr": 0.037311335196738925
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.027701228468542595,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.027701228468542595
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5284974093264249,
            "acc_stderr": 0.03602573571288441,
            "acc_norm": 0.5284974093264249,
            "acc_norm_stderr": 0.03602573571288441
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3684210526315789,
            "acc_stderr": 0.04537815354939391,
            "acc_norm": 0.3684210526315789,
            "acc_norm_stderr": 0.04537815354939391
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5155963302752293,
            "acc_stderr": 0.02142689153920805,
            "acc_norm": 0.5155963302752293,
            "acc_norm_stderr": 0.02142689153920805
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.04134913018303316,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.04134913018303316
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5098039215686274,
            "acc_stderr": 0.028624412550167958,
            "acc_norm": 0.5098039215686274,
            "acc_norm_stderr": 0.028624412550167958
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956914,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956914
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6446280991735537,
            "acc_stderr": 0.0436923632657398,
            "acc_norm": 0.6446280991735537,
            "acc_norm_stderr": 0.0436923632657398
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4868421052631579,
            "acc_stderr": 0.04067533136309173,
            "acc_norm": 0.4868421052631579,
            "acc_norm_stderr": 0.04067533136309173
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4068627450980392,
            "acc_stderr": 0.019873802005061177,
            "acc_norm": 0.4068627450980392,
            "acc_norm_stderr": 0.019873802005061177
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.33687943262411346,
            "acc_stderr": 0.02819553487396673,
            "acc_norm": 0.33687943262411346,
            "acc_norm_stderr": 0.02819553487396673
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.375,
            "acc_stderr": 0.04595091388086298,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.04595091388086298
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.39351851851851855,
            "acc_stderr": 0.03331747876370312,
            "acc_norm": 0.39351851851851855,
            "acc_norm_stderr": 0.03331747876370312
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.26145251396648045,
            "acc_stderr": 0.014696599650364555,
            "acc_norm": 0.26145251396648045,
            "acc_norm_stderr": 0.014696599650364555
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.35661764705882354,
            "acc_stderr": 0.02909720956841196,
            "acc_norm": 0.35661764705882354,
            "acc_norm_stderr": 0.02909720956841196
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.563265306122449,
            "acc_stderr": 0.031751952375833226,
            "acc_norm": 0.563265306122449,
            "acc_norm_stderr": 0.031751952375833226
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.540084388185654,
            "acc_stderr": 0.03244246810187913,
            "acc_norm": 0.540084388185654,
            "acc_norm_stderr": 0.03244246810187913
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31290743155149936,
            "acc_stderr": 0.011842529823062997,
            "acc_norm": 0.31290743155149936,
|
343 |
+
"acc_norm_stderr": 0.011842529823062997
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.4362745098039216,
|
347 |
+
"acc_stderr": 0.03480693138457039,
|
348 |
+
"acc_norm": 0.4362745098039216,
|
349 |
+
"acc_norm_stderr": 0.03480693138457039
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.5212121212121212,
|
353 |
+
"acc_stderr": 0.03900828913737302,
|
354 |
+
"acc_norm": 0.5212121212121212,
|
355 |
+
"acc_norm_stderr": 0.03900828913737302
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.2729498164014688,
|
359 |
+
"mc1_stderr": 0.015594753632006516,
|
360 |
+
"mc2": 0.4494211990695322,
|
361 |
+
"mc2_stderr": 0.016190690301781833
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.3990554899645809,
|
365 |
+
"acc_stderr": 0.0168363772928493,
|
366 |
+
"acc_norm": 0.4498229043683589,
|
367 |
+
"acc_norm_stderr": 0.01710357334382571
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "4yo1/llama3-eng-ko-8b-sl",
|
436 |
+
"model_sha": "5f82a1a6e36c50db9a2ee4b815d742b27cdb6023",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
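The result files in this commit all share the schema shown above: per-task metric blocks under "results", task versions under "versions", and run metadata under "config_general". Below is a minimal Python sketch for consuming one of them, assuming a local checkout of this dataset; the path names the file reconstructed above, and the unweighted macro-average is an illustrative aggregation, not necessarily the leaderboard's official formula.

import json
from pathlib import Path

# Repo-relative path of a result file added in this commit (adjust to your checkout).
path = Path("4yo1/llama3-eng-ko-8b-sl/result_2024-07-03 13:25:55.json")

with path.open(encoding="utf-8") as f:
    results = json.load(f)["results"]

# Collect per-subject ko-MMLU accuracies; task keys follow the
# "harness|ko_mmlu_<subject>|5" pattern seen throughout these files.
mmlu_accs = [v["acc"] for k, v in results.items() if k.startswith("harness|ko_mmlu_")]

# Unweighted macro-average over subjects -- an illustrative choice only.
print(f"ko-MMLU macro-average over {len(mmlu_accs)} subjects: {sum(mmlu_accs) / len(mmlu_accs):.4f}")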
4yo1/llama3-eng-ko-8b-sl2/result_2024-07-04 14:21:25.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.3148464163822526,
+            "acc_stderr": 0.013572657703084948,
+            "acc_norm": 0.3583617747440273,
+            "acc_norm_stderr": 0.01401288333485987
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.358195578570006,
+            "acc_stderr": 0.004784901248558722,
+            "acc_norm": 0.44971121290579563,
+            "acc_norm_stderr": 0.004964479324552529
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.5146198830409356,
+            "acc_stderr": 0.038331852752130254,
+            "acc_norm": 0.5146198830409356,
+            "acc_norm_stderr": 0.038331852752130254
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.4563106796116505,
+            "acc_stderr": 0.049318019942204146,
+            "acc_norm": 0.4563106796116505,
+            "acc_norm_stderr": 0.049318019942204146
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.3997445721583653,
+            "acc_stderr": 0.01751684790705327,
+            "acc_norm": 0.3997445721583653,
+            "acc_norm_stderr": 0.01751684790705327
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.28888888888888886,
+            "acc_stderr": 0.03915450630414251,
+            "acc_norm": 0.28888888888888886,
+            "acc_norm_stderr": 0.03915450630414251
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.39148936170212767,
+            "acc_stderr": 0.03190701242326812,
+            "acc_norm": 0.39148936170212767,
+            "acc_norm_stderr": 0.03190701242326812
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.35542168674698793,
+            "acc_stderr": 0.03726214354322415,
+            "acc_norm": 0.35542168674698793,
+            "acc_norm_stderr": 0.03726214354322415
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.45016077170418006,
+            "acc_stderr": 0.028256660723360177,
+            "acc_norm": 0.45016077170418006,
+            "acc_norm_stderr": 0.028256660723360177
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.4349775784753363,
+            "acc_stderr": 0.033272833702713445,
+            "acc_norm": 0.4349775784753363,
+            "acc_norm_stderr": 0.033272833702713445
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.4122137404580153,
+            "acc_stderr": 0.04317171194870254,
+            "acc_norm": 0.4122137404580153,
+            "acc_norm_stderr": 0.04317171194870254
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.47,
+            "acc_stderr": 0.05016135580465918,
+            "acc_norm": 0.47,
+            "acc_norm_stderr": 0.05016135580465918
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.4090909090909091,
+            "acc_stderr": 0.03502975799413007,
+            "acc_norm": 0.4090909090909091,
+            "acc_norm_stderr": 0.03502975799413007
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.45517241379310347,
+            "acc_stderr": 0.04149886942192117,
+            "acc_norm": 0.45517241379310347,
+            "acc_norm_stderr": 0.04149886942192117
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.2647058823529412,
+            "acc_stderr": 0.04389869956808778,
+            "acc_norm": 0.2647058823529412,
+            "acc_norm_stderr": 0.04389869956808778
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.5084033613445378,
+            "acc_stderr": 0.0324739027656967,
+            "acc_norm": 0.5084033613445378,
+            "acc_norm_stderr": 0.0324739027656967
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.441025641025641,
+            "acc_stderr": 0.025174048384000777,
+            "acc_norm": 0.441025641025641,
+            "acc_norm_stderr": 0.025174048384000777
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.51,
+            "acc_stderr": 0.05024183937956912,
+            "acc_norm": 0.51,
+            "acc_norm_stderr": 0.05024183937956912
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.0479372485441102,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.0479372485441102
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.6111111111111112,
+            "acc_stderr": 0.04712821257426769,
+            "acc_norm": 0.6111111111111112,
+            "acc_norm_stderr": 0.04712821257426769
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.43842364532019706,
+            "acc_stderr": 0.03491207857486518,
+            "acc_norm": 0.43842364532019706,
+            "acc_norm_stderr": 0.03491207857486518
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.47096774193548385,
+            "acc_stderr": 0.028396016402761008,
+            "acc_norm": 0.47096774193548385,
+            "acc_norm_stderr": 0.028396016402761008
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.6623931623931624,
+            "acc_stderr": 0.030980296992618558,
+            "acc_norm": 0.6623931623931624,
+            "acc_norm_stderr": 0.030980296992618558
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.44528301886792454,
+            "acc_stderr": 0.030588052974270655,
+            "acc_norm": 0.44528301886792454,
+            "acc_norm_stderr": 0.030588052974270655
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.4636363636363636,
+            "acc_stderr": 0.047764491623961985,
+            "acc_norm": 0.4636363636363636,
+            "acc_norm_stderr": 0.047764491623961985
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.37037037037037035,
+            "acc_stderr": 0.02944316932303154,
+            "acc_norm": 0.37037037037037035,
+            "acc_norm_stderr": 0.02944316932303154
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.3841059602649007,
+            "acc_stderr": 0.03971301814719198,
+            "acc_norm": 0.3841059602649007,
+            "acc_norm_stderr": 0.03971301814719198
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.5323383084577115,
+            "acc_stderr": 0.035281314729336065,
+            "acc_norm": 0.5323383084577115,
+            "acc_norm_stderr": 0.035281314729336065
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.44508670520231214,
+            "acc_stderr": 0.03789401760283647,
+            "acc_norm": 0.44508670520231214,
+            "acc_norm_stderr": 0.03789401760283647
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.36243386243386244,
+            "acc_stderr": 0.024757473902752045,
+            "acc_norm": 0.36243386243386244,
+            "acc_norm_stderr": 0.024757473902752045
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.375,
+            "acc_stderr": 0.04048439222695598,
+            "acc_norm": 0.375,
+            "acc_norm_stderr": 0.04048439222695598
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.4,
+            "acc_stderr": 0.04923659639173309,
+            "acc_norm": 0.4,
+            "acc_norm_stderr": 0.04923659639173309
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.58,
+            "acc_stderr": 0.049604496374885836,
+            "acc_norm": 0.58,
+            "acc_norm_stderr": 0.049604496374885836
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.4595375722543353,
+            "acc_stderr": 0.026830805998952243,
+            "acc_norm": 0.4595375722543353,
+            "acc_norm_stderr": 0.026830805998952243
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.37423312883435583,
+            "acc_stderr": 0.03802068102899615,
+            "acc_norm": 0.37423312883435583,
+            "acc_norm_stderr": 0.03802068102899615
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.4660493827160494,
+            "acc_stderr": 0.027756535257347663,
+            "acc_norm": 0.4660493827160494,
+            "acc_norm_stderr": 0.027756535257347663
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.31,
+            "acc_stderr": 0.04648231987117316,
+            "acc_norm": 0.31,
+            "acc_norm_stderr": 0.04648231987117316
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.47150259067357514,
+            "acc_stderr": 0.036025735712884414,
+            "acc_norm": 0.47150259067357514,
+            "acc_norm_stderr": 0.036025735712884414
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.2807017543859649,
+            "acc_stderr": 0.04227054451232199,
+            "acc_norm": 0.2807017543859649,
+            "acc_norm_stderr": 0.04227054451232199
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.5192660550458715,
+            "acc_stderr": 0.02142140298254888,
+            "acc_norm": 0.5192660550458715,
+            "acc_norm_stderr": 0.02142140298254888
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.2857142857142857,
+            "acc_stderr": 0.0404061017820884,
+            "acc_norm": 0.2857142857142857,
+            "acc_norm_stderr": 0.0404061017820884
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.5424836601307189,
+            "acc_stderr": 0.028526383452142638,
+            "acc_norm": 0.5424836601307189,
+            "acc_norm_stderr": 0.028526383452142638
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.56,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.56,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.5950413223140496,
+            "acc_stderr": 0.044811377559424694,
+            "acc_norm": 0.5950413223140496,
+            "acc_norm_stderr": 0.044811377559424694
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.4342105263157895,
+            "acc_stderr": 0.040335656678483205,
+            "acc_norm": 0.4342105263157895,
+            "acc_norm_stderr": 0.040335656678483205
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.41830065359477125,
+            "acc_stderr": 0.019955975145835542,
+            "acc_norm": 0.41830065359477125,
+            "acc_norm_stderr": 0.019955975145835542
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.3049645390070922,
+            "acc_stderr": 0.02746470844202213,
+            "acc_norm": 0.3049645390070922,
+            "acc_norm_stderr": 0.02746470844202213
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.35714285714285715,
+            "acc_stderr": 0.04547960999764376,
+            "acc_norm": 0.35714285714285715,
+            "acc_norm_stderr": 0.04547960999764376
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.4027777777777778,
+            "acc_stderr": 0.03344887382997866,
+            "acc_norm": 0.4027777777777778,
+            "acc_norm_stderr": 0.03344887382997866
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.2335195530726257,
+            "acc_stderr": 0.014149575348976273,
+            "acc_norm": 0.2335195530726257,
+            "acc_norm_stderr": 0.014149575348976273
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.42,
+            "acc_stderr": 0.049604496374885836,
+            "acc_norm": 0.42,
+            "acc_norm_stderr": 0.049604496374885836
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.56,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.56,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.4007352941176471,
+            "acc_stderr": 0.029768263528933105,
+            "acc_norm": 0.4007352941176471,
+            "acc_norm_stderr": 0.029768263528933105
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.5224489795918368,
+            "acc_stderr": 0.03197694118713671,
+            "acc_norm": 0.5224489795918368,
+            "acc_norm_stderr": 0.03197694118713671
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.510548523206751,
+            "acc_stderr": 0.032539983791662855,
+            "acc_norm": 0.510548523206751,
+            "acc_norm_stderr": 0.032539983791662855
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.31877444589308995,
+            "acc_stderr": 0.011901895635786084,
+            "acc_norm": 0.31877444589308995,
+            "acc_norm_stderr": 0.011901895635786084
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.45098039215686275,
+            "acc_stderr": 0.03492406104163613,
+            "acc_norm": 0.45098039215686275,
+            "acc_norm_stderr": 0.03492406104163613
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.5333333333333333,
+            "acc_stderr": 0.03895658065271846,
+            "acc_norm": 0.5333333333333333,
+            "acc_norm_stderr": 0.03895658065271846
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.2729498164014688,
+            "mc1_stderr": 0.015594753632006514,
+            "mc2": 0.4303948510286609,
+            "mc2_stderr": 0.01639757694486845
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.3659976387249115,
+            "acc_stderr": 0.0165614896648957,
+            "acc_norm": 0.3990554899645809,
+            "acc_norm_stderr": 0.016836377292849303
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "4yo1/llama3-eng-ko-8b-sl2",
+        "model_sha": "d99912a733ba3a27a3fa36d6e9fb9b4558cefb60",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
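The acc_stderr values throughout these files are consistent with the standard error of a sample proportion computed with an n - 1 denominator, sqrt(p * (1 - p) / (n - 1)). A quick check against the college_chemistry block of the sl2 file above, assuming the Korean subset keeps the original MMLU test-set size of 100 questions for that subject:

import math

# acc and acc_stderr for harness|ko_mmlu_college_chemistry|5 from the sl2
# file above; n = 100 is an assumption (the original MMLU test-set size).
acc, reported_stderr, n = 0.4, 0.04923659639173309, 100

# Standard error of a sample proportion with an (n - 1) denominator.
recomputed = math.sqrt(acc * (1 - acc) / (n - 1))

print(f"recomputed: {recomputed:.17f}")
print(f"reported:   {reported_stderr:.17f}")
assert math.isclose(recomputed, reported_stderr, rel_tol=1e-9)

The same identity reproduces the us_foreign_policy numbers above (acc 0.58 gives sqrt(0.58 * 0.42 / 99) ≈ 0.0496045), so it appears to hold across the 100-question subjects.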
4yo1/llama3-eng-ko-8b-sl3/result_2024-07-08 02:28:51.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.3097269624573379,
+            "acc_stderr": 0.013512058415238361,
+            "acc_norm": 0.3796928327645051,
+            "acc_norm_stderr": 0.014182119866974869
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.3564031069508066,
+            "acc_stderr": 0.004779574402771384,
+            "acc_norm": 0.45379406492730534,
+            "acc_norm_stderr": 0.004968429476345018
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.4678362573099415,
+            "acc_stderr": 0.038268824176603676,
+            "acc_norm": 0.4678362573099415,
+            "acc_norm_stderr": 0.038268824176603676
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.5631067961165048,
+            "acc_stderr": 0.04911147107365777,
+            "acc_norm": 0.5631067961165048,
+            "acc_norm_stderr": 0.04911147107365777
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.4061302681992337,
+            "acc_stderr": 0.017562037406478912,
+            "acc_norm": 0.4061302681992337,
+            "acc_norm_stderr": 0.017562037406478912
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.34814814814814815,
+            "acc_stderr": 0.041153246103369526,
+            "acc_norm": 0.34814814814814815,
+            "acc_norm_stderr": 0.041153246103369526
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.18,
+            "acc_stderr": 0.03861229196653697,
+            "acc_norm": 0.18,
+            "acc_norm_stderr": 0.03861229196653697
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.40425531914893614,
+            "acc_stderr": 0.03208115750788682,
+            "acc_norm": 0.40425531914893614,
+            "acc_norm_stderr": 0.03208115750788682
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.40963855421686746,
+            "acc_stderr": 0.038284011150790206,
+            "acc_norm": 0.40963855421686746,
+            "acc_norm_stderr": 0.038284011150790206
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.42765273311897106,
+            "acc_stderr": 0.028099240775809567,
+            "acc_norm": 0.42765273311897106,
+            "acc_norm_stderr": 0.028099240775809567
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.484304932735426,
+            "acc_stderr": 0.0335412657542081,
+            "acc_norm": 0.484304932735426,
+            "acc_norm_stderr": 0.0335412657542081
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.46564885496183206,
+            "acc_stderr": 0.043749285605997376,
+            "acc_norm": 0.46564885496183206,
+            "acc_norm_stderr": 0.043749285605997376
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.41,
+            "acc_stderr": 0.04943110704237103,
+            "acc_norm": 0.41,
+            "acc_norm_stderr": 0.04943110704237103
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.4292929292929293,
+            "acc_stderr": 0.035265527246011986,
+            "acc_norm": 0.4292929292929293,
+            "acc_norm_stderr": 0.035265527246011986
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.4068965517241379,
+            "acc_stderr": 0.04093793981266236,
+            "acc_norm": 0.4068965517241379,
+            "acc_norm_stderr": 0.04093793981266236
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.2647058823529412,
+            "acc_stderr": 0.04389869956808778,
+            "acc_norm": 0.2647058823529412,
+            "acc_norm_stderr": 0.04389869956808778
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.41596638655462187,
+            "acc_stderr": 0.03201650100739615,
+            "acc_norm": 0.41596638655462187,
+            "acc_norm_stderr": 0.03201650100739615
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.4641025641025641,
+            "acc_stderr": 0.025285585990017834,
+            "acc_norm": 0.4641025641025641,
+            "acc_norm_stderr": 0.025285585990017834
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.58,
+            "acc_stderr": 0.049604496374885836,
+            "acc_norm": 0.58,
+            "acc_norm_stderr": 0.049604496374885836
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.26,
+            "acc_stderr": 0.0440844002276808,
+            "acc_norm": 0.26,
+            "acc_norm_stderr": 0.0440844002276808
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.5370370370370371,
+            "acc_stderr": 0.04820403072760627,
+            "acc_norm": 0.5370370370370371,
+            "acc_norm_stderr": 0.04820403072760627
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.4039408866995074,
+            "acc_stderr": 0.03452453903822041,
+            "acc_norm": 0.4039408866995074,
+            "acc_norm_stderr": 0.03452453903822041
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.44516129032258067,
+            "acc_stderr": 0.028272410186214906,
+            "acc_norm": 0.44516129032258067,
+            "acc_norm_stderr": 0.028272410186214906
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.6410256410256411,
+            "acc_stderr": 0.03142616993791924,
+            "acc_norm": 0.6410256410256411,
+            "acc_norm_stderr": 0.03142616993791924
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.44150943396226416,
+            "acc_stderr": 0.030561590426731833,
+            "acc_norm": 0.44150943396226416,
+            "acc_norm_stderr": 0.030561590426731833
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.4818181818181818,
+            "acc_stderr": 0.04785964010794917,
+            "acc_norm": 0.4818181818181818,
+            "acc_norm_stderr": 0.04785964010794917
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.3296296296296296,
+            "acc_stderr": 0.02866120111652458,
+            "acc_norm": 0.3296296296296296,
+            "acc_norm_stderr": 0.02866120111652458
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.33112582781456956,
+            "acc_stderr": 0.038425817186598696,
+            "acc_norm": 0.33112582781456956,
+            "acc_norm_stderr": 0.038425817186598696
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.5024875621890548,
+            "acc_stderr": 0.03535490150137288,
+            "acc_norm": 0.5024875621890548,
+            "acc_norm_stderr": 0.03535490150137288
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.44508670520231214,
+            "acc_stderr": 0.03789401760283647,
+            "acc_norm": 0.44508670520231214,
+            "acc_norm_stderr": 0.03789401760283647
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.31746031746031744,
+            "acc_stderr": 0.02397386199899207,
+            "acc_norm": 0.31746031746031744,
+            "acc_norm_stderr": 0.02397386199899207
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.4097222222222222,
+            "acc_stderr": 0.04112490974670788,
+            "acc_norm": 0.4097222222222222,
+            "acc_norm_stderr": 0.04112490974670788
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.37,
+            "acc_stderr": 0.04852365870939099,
+            "acc_norm": 0.37,
+            "acc_norm_stderr": 0.04852365870939099
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.54,
+            "acc_stderr": 0.05009082659620333,
+            "acc_norm": 0.54,
+            "acc_norm_stderr": 0.05009082659620333
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.44508670520231214,
+            "acc_stderr": 0.026756255129663765,
+            "acc_norm": 0.44508670520231214,
+            "acc_norm_stderr": 0.026756255129663765
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.3987730061349693,
+            "acc_stderr": 0.038470214204560246,
+            "acc_norm": 0.3987730061349693,
+            "acc_norm_stderr": 0.038470214204560246
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.4228395061728395,
+            "acc_stderr": 0.0274874729808716,
+            "acc_norm": 0.4228395061728395,
+            "acc_norm_stderr": 0.0274874729808716
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.33,
+            "acc_stderr": 0.047258156262526045,
+            "acc_norm": 0.33,
+            "acc_norm_stderr": 0.047258156262526045
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.49222797927461137,
+            "acc_stderr": 0.03608003225569654,
+            "acc_norm": 0.49222797927461137,
+            "acc_norm_stderr": 0.03608003225569654
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.3157894736842105,
+            "acc_stderr": 0.04372748290278007,
+            "acc_norm": 0.3157894736842105,
+            "acc_norm_stderr": 0.04372748290278007
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.48623853211009177,
+            "acc_stderr": 0.02142920208987408,
+            "acc_norm": 0.48623853211009177,
+            "acc_norm_stderr": 0.02142920208987408
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.3333333333333333,
+            "acc_stderr": 0.042163702135578345,
+            "acc_norm": 0.3333333333333333,
+            "acc_norm_stderr": 0.042163702135578345
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.4869281045751634,
+            "acc_stderr": 0.028620130800700246,
+            "acc_norm": 0.4869281045751634,
+            "acc_norm_stderr": 0.028620130800700246
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.47,
+            "acc_stderr": 0.050161355804659205,
+            "acc_norm": 0.47,
+            "acc_norm_stderr": 0.050161355804659205
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.628099173553719,
+            "acc_stderr": 0.04412015806624503,
+            "acc_norm": 0.628099173553719,
+            "acc_norm_stderr": 0.04412015806624503
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.4407894736842105,
+            "acc_stderr": 0.04040311062490435,
+            "acc_norm": 0.4407894736842105,
+            "acc_norm_stderr": 0.04040311062490435
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.39052287581699346,
+            "acc_stderr": 0.0197370089980946,
+            "acc_norm": 0.39052287581699346,
+            "acc_norm_stderr": 0.0197370089980946
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.32269503546099293,
+            "acc_stderr": 0.02788913930053478,
+            "acc_norm": 0.32269503546099293,
+            "acc_norm_stderr": 0.02788913930053478
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.38392857142857145,
+            "acc_stderr": 0.04616143075028546,
+            "acc_norm": 0.38392857142857145,
+            "acc_norm_stderr": 0.04616143075028546
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.33796296296296297,
+            "acc_stderr": 0.032259413526312945,
+            "acc_norm": 0.33796296296296297,
+            "acc_norm_stderr": 0.032259413526312945
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.2424581005586592,
+            "acc_stderr": 0.01433352205921789,
+            "acc_norm": 0.2424581005586592,
+            "acc_norm_stderr": 0.01433352205921789
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.41,
+            "acc_stderr": 0.049431107042371025,
+            "acc_norm": 0.41,
+            "acc_norm_stderr": 0.049431107042371025
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.55,
+            "acc_stderr": 0.04999999999999999,
+            "acc_norm": 0.55,
+            "acc_norm_stderr": 0.04999999999999999
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.3492647058823529,
+            "acc_stderr": 0.028959755196824862,
+            "acc_norm": 0.3492647058823529,
+            "acc_norm_stderr": 0.028959755196824862
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.5265306122448979,
+            "acc_stderr": 0.03196412734523272,
+            "acc_norm": 0.5265306122448979,
+            "acc_norm_stderr": 0.03196412734523272
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.5063291139240507,
+            "acc_stderr": 0.03254462010767859,
+            "acc_norm": 0.5063291139240507,
+            "acc_norm_stderr": 0.03254462010767859
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.28292046936114734,
+            "acc_stderr": 0.011503891323188976,
+            "acc_norm": 0.28292046936114734,
+            "acc_norm_stderr": 0.011503891323188976
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.45588235294117646,
+            "acc_stderr": 0.03495624522015474,
+            "acc_norm": 0.45588235294117646,
+            "acc_norm_stderr": 0.03495624522015474
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.5333333333333333,
+            "acc_stderr": 0.03895658065271846,
+            "acc_norm": 0.5333333333333333,
+            "acc_norm_stderr": 0.03895658065271846
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.2839657282741738,
+            "mc1_stderr": 0.015785370858396708,
+            "mc2": 0.4556979185667561,
+            "mc2_stderr": 0.01630139604691971
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.3707201889020071,
+            "acc_stderr": 0.016605801289212605,
+            "acc_norm": 0.40613931523022434,
+            "acc_norm_stderr": 0.016884749503191392
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "4yo1/llama3-eng-ko-8b-sl3",
+        "model_sha": "4d29db816f84bee0341d5037f89e2d63c4a05381",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
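Since each model variant in this commit ships the same task set, run-to-run comparison is a dictionary join. A sketch that ranks the largest accuracy movements between the sl2 and sl3 files above (the paths are the repo-relative names used in this commit; adjust to your checkout):

import json

paths = [
    "4yo1/llama3-eng-ko-8b-sl2/result_2024-07-04 14:21:25.json",
    "4yo1/llama3-eng-ko-8b-sl3/result_2024-07-08 02:28:51.json",
]

runs = []
for p in paths:
    with open(p, encoding="utf-8") as f:
        runs.append(json.load(f))

a, b = (run["results"] for run in runs)

# Rank tasks by the absolute change in accuracy; the "acc" filter skips
# ko_truthfulqa_mc, which reports mc1/mc2 instead.
deltas = sorted(
    ((b[k]["acc"] - a[k]["acc"], k) for k in a.keys() & b.keys() if "acc" in a[k]),
    key=lambda t: abs(t[0]),
    reverse=True,
)
for delta, task in deltas[:5]:
    print(f"{task}: {delta:+.4f}")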
4yo1/llama3-eng-ko-8b-sl4/result_2024-07-10 12:28:24.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.310580204778157,
+            "acc_stderr": 0.013522292098053054,
+            "acc_norm": 0.363481228668942,
+            "acc_norm_stderr": 0.01405620731906828
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.3595897231627166,
+            "acc_stderr": 0.004788994060654275,
+            "acc_norm": 0.460565624377614,
+            "acc_norm_stderr": 0.004974238284524824
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.47953216374269003,
+            "acc_stderr": 0.038316105328219316,
+            "acc_norm": 0.47953216374269003,
+            "acc_norm_stderr": 0.038316105328219316
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.5436893203883495,
+            "acc_stderr": 0.049318019942204146,
+            "acc_norm": 0.5436893203883495,
+            "acc_norm_stderr": 0.049318019942204146
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.4112388250319285,
+            "acc_stderr": 0.017595971908056576,
+            "acc_norm": 0.4112388250319285,
+            "acc_norm_stderr": 0.017595971908056576
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.34074074074074073,
+            "acc_stderr": 0.04094376269996794,
+            "acc_norm": 0.34074074074074073,
+            "acc_norm_stderr": 0.04094376269996794
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.22,
+            "acc_stderr": 0.041633319989322695,
+            "acc_norm": 0.22,
+            "acc_norm_stderr": 0.041633319989322695
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.4,
+            "acc_stderr": 0.03202563076101736,
+            "acc_norm": 0.4,
+            "acc_norm_stderr": 0.03202563076101736
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.3795180722891566,
+            "acc_stderr": 0.037777988227480165,
+            "acc_norm": 0.3795180722891566,
+            "acc_norm_stderr": 0.037777988227480165
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.47266881028938906,
+            "acc_stderr": 0.02835563356832818,
+            "acc_norm": 0.47266881028938906,
+            "acc_norm_stderr": 0.02835563356832818
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.4304932735426009,
+            "acc_stderr": 0.033231973029429394,
+            "acc_norm": 0.4304932735426009,
+            "acc_norm_stderr": 0.033231973029429394
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.4732824427480916,
+            "acc_stderr": 0.04379024936553893,
+            "acc_norm": 0.4732824427480916,
+            "acc_norm_stderr": 0.04379024936553893
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.4,
+            "acc_stderr": 0.049236596391733084,
+            "acc_norm": 0.4,
+            "acc_norm_stderr": 0.049236596391733084
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.45454545454545453,
+            "acc_stderr": 0.03547601494006937,
+            "acc_norm": 0.45454545454545453,
+            "acc_norm_stderr": 0.03547601494006937
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.4896551724137931,
+            "acc_stderr": 0.041657747757287644,
+            "acc_norm": 0.4896551724137931,
+            "acc_norm_stderr": 0.041657747757287644
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.39215686274509803,
+            "acc_stderr": 0.04858083574266347,
+            "acc_norm": 0.39215686274509803,
+            "acc_norm_stderr": 0.04858083574266347
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.47058823529411764,
+            "acc_stderr": 0.03242225027115007,
+            "acc_norm": 0.47058823529411764,
+            "acc_norm_stderr": 0.03242225027115007
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.4666666666666667,
+            "acc_stderr": 0.025294608023986483,
+            "acc_norm": 0.4666666666666667,
+            "acc_norm_stderr": 0.025294608023986483
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.56,
+            "acc_stderr": 0.04988876515698589,
+            "acc_norm": 0.56,
+            "acc_norm_stderr": 0.04988876515698589
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.36,
+            "acc_stderr": 0.04824181513244218,
+            "acc_norm": 0.36,
+            "acc_norm_stderr": 0.04824181513244218
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.49074074074074076,
+            "acc_stderr": 0.04832853553437055,
+            "acc_norm": 0.49074074074074076,
+            "acc_norm_stderr": 0.04832853553437055
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.41379310344827586,
+            "acc_stderr": 0.03465304488406795,
+            "acc_norm": 0.41379310344827586,
+            "acc_norm_stderr": 0.03465304488406795
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.45806451612903226,
+            "acc_stderr": 0.02834378725054064,
+            "acc_norm": 0.45806451612903226,
+            "acc_norm_stderr": 0.02834378725054064
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.6153846153846154,
+            "acc_stderr": 0.03187195347942466,
+            "acc_norm": 0.6153846153846154,
+            "acc_norm_stderr": 0.03187195347942466
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.4867924528301887,
+            "acc_stderr": 0.03076213487450049,
+            "acc_norm": 0.4867924528301887,
+            "acc_norm_stderr": 0.03076213487450049
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.42727272727272725,
+            "acc_stderr": 0.04738198703545483,
+            "acc_norm": 0.42727272727272725,
+            "acc_norm_stderr": 0.04738198703545483
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.3296296296296296,
+            "acc_stderr": 0.028661201116524593,
+            "acc_norm": 0.3296296296296296,
+            "acc_norm_stderr": 0.028661201116524593
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.3576158940397351,
+            "acc_stderr": 0.03913453431177258,
+            "acc_norm": 0.3576158940397351,
+            "acc_norm_stderr": 0.03913453431177258
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.5522388059701493,
+            "acc_stderr": 0.03516184772952168,
+            "acc_norm": 0.5522388059701493,
+            "acc_norm_stderr": 0.03516184772952168
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.4682080924855491,
+            "acc_stderr": 0.03804749744364764,
+            "acc_norm": 0.4682080924855491,
+            "acc_norm_stderr": 0.03804749744364764
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.36243386243386244,
+            "acc_stderr": 0.024757473902752045,
+            "acc_norm": 0.36243386243386244,
+            "acc_norm_stderr": 0.024757473902752045
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.3819444444444444,
+            "acc_stderr": 0.040629907841466674,
+            "acc_norm": 0.3819444444444444,
+            "acc_norm_stderr": 0.040629907841466674
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.047937248544110196,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.047937248544110196
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.59,
+            "acc_stderr": 0.04943110704237101,
+            "acc_norm": 0.59,
+            "acc_norm_stderr": 0.04943110704237101
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.43352601156069365,
+            "acc_stderr": 0.026680134761679217,
+            "acc_norm": 0.43352601156069365,
+            "acc_norm_stderr": 0.026680134761679217
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.39263803680981596,
+            "acc_stderr": 0.03836740907831028,
+            "acc_norm": 0.39263803680981596,
+            "acc_norm_stderr": 0.03836740907831028
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.48148148148148145,
+            "acc_stderr": 0.027801656212323667,
+            "acc_norm": 0.48148148148148145,
+            "acc_norm_stderr": 0.027801656212323667
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.32,
+            "acc_stderr": 0.046882617226215034,
+            "acc_norm": 0.32,
+            "acc_norm_stderr": 0.046882617226215034
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.44559585492227977,
+            "acc_stderr": 0.03587014986075659,
+            "acc_norm": 0.44559585492227977,
+            "acc_norm_stderr": 0.03587014986075659
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.30701754385964913,
+            "acc_stderr": 0.0433913832257986,
+            "acc_norm": 0.30701754385964913,
+            "acc_norm_stderr": 0.0433913832257986
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.5192660550458715,
+            "acc_stderr": 0.02142140298254889,
+            "acc_norm": 0.5192660550458715,
+            "acc_norm_stderr": 0.02142140298254889
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.3888888888888889,
+            "acc_stderr": 0.04360314860077459,
+            "acc_norm": 0.3888888888888889,
+            "acc_norm_stderr": 0.04360314860077459
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.5196078431372549,
+            "acc_stderr": 0.028607893699576066,
+            "acc_norm": 0.5196078431372549,
+            "acc_norm_stderr": 0.028607893699576066
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.56,
+            "acc_stderr": 0.0498887651569859,
+            "acc_norm": 0.56,
+            "acc_norm_stderr": 0.0498887651569859
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.6115702479338843,
+            "acc_stderr": 0.044492703500683815,
+            "acc_norm": 0.6115702479338843,
+            "acc_norm_stderr": 0.044492703500683815
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.48026315789473684,
+            "acc_stderr": 0.040657710025626036,
+            "acc_norm": 0.48026315789473684,
+            "acc_norm_stderr": 0.040657710025626036
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.3937908496732026,
+            "acc_stderr": 0.019766211991073063,
+            "acc_norm": 0.3937908496732026,
+            "acc_norm_stderr": 0.019766211991073063
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.3723404255319149,
+            "acc_stderr": 0.02883892147125145,
+            "acc_norm": 0.3723404255319149,
+            "acc_norm_stderr": 0.02883892147125145
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.3482142857142857,
+            "acc_stderr": 0.04521829902833586,
+            "acc_norm": 0.3482142857142857,
+
"acc_norm_stderr": 0.04521829902833586
|
296 |
+
},
|
297 |
+
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
+
"acc": 0.39351851851851855,
|
299 |
+
"acc_stderr": 0.03331747876370312,
|
300 |
+
"acc_norm": 0.39351851851851855,
|
301 |
+
"acc_norm_stderr": 0.03331747876370312
|
302 |
+
},
|
303 |
+
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
+
"acc": 0.2770949720670391,
|
305 |
+
"acc_stderr": 0.014968772435812143,
|
306 |
+
"acc_norm": 0.2770949720670391,
|
307 |
+
"acc_norm_stderr": 0.014968772435812143
|
308 |
+
},
|
309 |
+
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
+
"acc": 0.39,
|
311 |
+
"acc_stderr": 0.04902071300001975,
|
312 |
+
"acc_norm": 0.39,
|
313 |
+
"acc_norm_stderr": 0.04902071300001975
|
314 |
+
},
|
315 |
+
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
+
"acc": 0.55,
|
317 |
+
"acc_stderr": 0.04999999999999999,
|
318 |
+
"acc_norm": 0.55,
|
319 |
+
"acc_norm_stderr": 0.04999999999999999
|
320 |
+
},
|
321 |
+
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
+
"acc": 0.33088235294117646,
|
323 |
+
"acc_stderr": 0.028582709753898445,
|
324 |
+
"acc_norm": 0.33088235294117646,
|
325 |
+
"acc_norm_stderr": 0.028582709753898445
|
326 |
+
},
|
327 |
+
"harness|ko_mmlu_security_studies|5": {
|
328 |
+
"acc": 0.5469387755102041,
|
329 |
+
"acc_stderr": 0.031867859300041275,
|
330 |
+
"acc_norm": 0.5469387755102041,
|
331 |
+
"acc_norm_stderr": 0.031867859300041275
|
332 |
+
},
|
333 |
+
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
+
"acc": 0.5485232067510548,
|
335 |
+
"acc_stderr": 0.032393600173974704,
|
336 |
+
"acc_norm": 0.5485232067510548,
|
337 |
+
"acc_norm_stderr": 0.032393600173974704
|
338 |
+
},
|
339 |
+
"harness|ko_mmlu_professional_law|5": {
|
340 |
+
"acc": 0.3005215123859192,
|
341 |
+
"acc_stderr": 0.011709918883039117,
|
342 |
+
"acc_norm": 0.3005215123859192,
|
343 |
+
"acc_norm_stderr": 0.011709918883039117
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.46568627450980393,
|
347 |
+
"acc_stderr": 0.03501038327635897,
|
348 |
+
"acc_norm": 0.46568627450980393,
|
349 |
+
"acc_norm_stderr": 0.03501038327635897
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.5393939393939394,
|
353 |
+
"acc_stderr": 0.03892207016552012,
|
354 |
+
"acc_norm": 0.5393939393939394,
|
355 |
+
"acc_norm_stderr": 0.03892207016552012
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.2827417380660955,
|
359 |
+
"mc1_stderr": 0.01576477083677731,
|
360 |
+
"mc2": 0.4661586592281064,
|
361 |
+
"mc2_stderr": 0.016330593604368164
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.40613931523022434,
|
365 |
+
"acc_stderr": 0.016884749503191392,
|
366 |
+
"acc_norm": 0.43919716646989376,
|
367 |
+
"acc_norm_stderr": 0.017062775744780705
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "4yo1/llama3-eng-ko-8b-sl4",
|
436 |
+
"model_sha": "25a488e868ecf3d10fef14a534257043b895df7e",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
4yo1/llama3-eng-ko-8b-sl5/result_2024-07-16 00:48:26.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.33532423208191126, "acc_stderr": 0.013796182947785564, "acc_norm": 0.3771331058020478, "acc_norm_stderr": 0.014163366896192584},
        "harness|ko_hellaswag|10": {"acc": 0.3536148177653854, "acc_stderr": 0.004771143074426136, "acc_norm": 0.4545907189802828, "acc_norm_stderr": 0.004969160917379652},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.4502923976608187, "acc_stderr": 0.038158273659132366, "acc_norm": 0.4502923976608187, "acc_norm_stderr": 0.038158273659132366},
        "harness|ko_mmlu_management|5": {"acc": 0.5436893203883495, "acc_stderr": 0.049318019942204146, "acc_norm": 0.5436893203883495, "acc_norm_stderr": 0.049318019942204146},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.3895274584929757, "acc_stderr": 0.01743808255626459, "acc_norm": 0.3895274584929757, "acc_norm_stderr": 0.01743808255626459},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.37037037037037035, "acc_stderr": 0.04171654161354543, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04171654161354543},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.22, "acc_norm_stderr": 0.0416333199893227},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.425531914893617, "acc_stderr": 0.032321469162244675, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.032321469162244675},
        "harness|ko_mmlu_virology|5": {"acc": 0.37349397590361444, "acc_stderr": 0.03765845117168863, "acc_norm": 0.37349397590361444, "acc_norm_stderr": 0.03765845117168863},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.49517684887459806, "acc_stderr": 0.028396770444111288, "acc_norm": 0.49517684887459806, "acc_norm_stderr": 0.028396770444111288},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.42152466367713004, "acc_stderr": 0.033141902221106564, "acc_norm": 0.42152466367713004, "acc_norm_stderr": 0.033141902221106564},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.48091603053435117, "acc_stderr": 0.04382094705550988, "acc_norm": 0.48091603053435117, "acc_norm_stderr": 0.04382094705550988},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.42, "acc_stderr": 0.04960449637488583, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488583},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.46464646464646464, "acc_stderr": 0.035534363688280626, "acc_norm": 0.46464646464646464, "acc_norm_stderr": 0.035534363688280626},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.6068965517241379, "acc_stderr": 0.040703290137070705, "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.040703290137070705},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.3137254901960784, "acc_stderr": 0.04617034827006716, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006716},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.4831932773109244, "acc_stderr": 0.03246013680375308, "acc_norm": 0.4831932773109244, "acc_norm_stderr": 0.03246013680375308},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.47435897435897434, "acc_stderr": 0.025317649726448652, "acc_norm": 0.47435897435897434, "acc_norm_stderr": 0.025317649726448652},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5555555555555556, "acc_stderr": 0.04803752235190193, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04803752235190193},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.4088669950738916, "acc_stderr": 0.03459058815883233, "acc_norm": 0.4088669950738916, "acc_norm_stderr": 0.03459058815883233},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.49032258064516127, "acc_stderr": 0.02843867799890955, "acc_norm": 0.49032258064516127, "acc_norm_stderr": 0.02843867799890955},
        "harness|ko_mmlu_marketing|5": {"acc": 0.6709401709401709, "acc_stderr": 0.030782321577688173, "acc_norm": 0.6709401709401709, "acc_norm_stderr": 0.030782321577688173},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.44150943396226416, "acc_stderr": 0.03056159042673183, "acc_norm": 0.44150943396226416, "acc_norm_stderr": 0.03056159042673183},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.44545454545454544, "acc_stderr": 0.047605488214603246, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.047605488214603246},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.35555555555555557, "acc_stderr": 0.029185714949857403, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.029185714949857403},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684},
        "harness|ko_mmlu_sociology|5": {"acc": 0.5870646766169154, "acc_stderr": 0.03481520803367348, "acc_norm": 0.5870646766169154, "acc_norm_stderr": 0.03481520803367348},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.35260115606936415, "acc_stderr": 0.03643037168958548, "acc_norm": 0.35260115606936415, "acc_norm_stderr": 0.03643037168958548},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.38095238095238093, "acc_stderr": 0.025010749116137605, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.025010749116137605},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.4652777777777778, "acc_stderr": 0.04171115858181618, "acc_norm": 0.4652777777777778, "acc_norm_stderr": 0.04171115858181618},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.4682080924855491, "acc_stderr": 0.026864624366756646, "acc_norm": 0.4682080924855491, "acc_norm_stderr": 0.026864624366756646},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.4233128834355828, "acc_stderr": 0.03881891213334383, "acc_norm": 0.4233128834355828, "acc_norm_stderr": 0.03881891213334383},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.49691358024691357, "acc_stderr": 0.027820214158594377, "acc_norm": 0.49691358024691357, "acc_norm_stderr": 0.027820214158594377},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.5025906735751295, "acc_stderr": 0.03608390745384487, "acc_norm": 0.5025906735751295, "acc_norm_stderr": 0.03608390745384487},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.3684210526315789, "acc_stderr": 0.04537815354939391, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.04537815354939391},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5376146788990825, "acc_stderr": 0.021376575274397576, "acc_norm": 0.5376146788990825, "acc_norm_stderr": 0.021376575274397576},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.3888888888888889, "acc_stderr": 0.04360314860077459, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04360314860077459},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.4934640522875817, "acc_stderr": 0.028627470550556047, "acc_norm": 0.4934640522875817, "acc_norm_stderr": 0.028627470550556047},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975},
        "harness|ko_mmlu_international_law|5": {"acc": 0.6528925619834711, "acc_stderr": 0.04345724570292535, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.04345724570292535},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.40789473684210525, "acc_stderr": 0.03999309712777473, "acc_norm": 0.40789473684210525, "acc_norm_stderr": 0.03999309712777473},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3839869281045752, "acc_stderr": 0.019675808135281515, "acc_norm": 0.3839869281045752, "acc_norm_stderr": 0.019675808135281515},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3333333333333333, "acc_stderr": 0.028121636040639886, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028121636040639886},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.32142857142857145, "acc_stderr": 0.0443280405529152, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.0443280405529152},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.38425925925925924, "acc_stderr": 0.03317354514310742, "acc_norm": 0.38425925925925924, "acc_norm_stderr": 0.03317354514310742},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.25139664804469275, "acc_stderr": 0.014508979453553974, "acc_norm": 0.25139664804469275, "acc_norm_stderr": 0.014508979453553974},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.35661764705882354, "acc_stderr": 0.02909720956841195, "acc_norm": 0.35661764705882354, "acc_norm_stderr": 0.02909720956841195},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.5877551020408164, "acc_stderr": 0.031512360446742695, "acc_norm": 0.5877551020408164, "acc_norm_stderr": 0.031512360446742695},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5907172995780591, "acc_stderr": 0.032007041833595914, "acc_norm": 0.5907172995780591, "acc_norm_stderr": 0.032007041833595914},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.34615384615384615, "acc_stderr": 0.012150699768228575, "acc_norm": 0.34615384615384615, "acc_norm_stderr": 0.012150699768228575},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.47549019607843135, "acc_stderr": 0.035050931943487976, "acc_norm": 0.47549019607843135, "acc_norm_stderr": 0.035050931943487976},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5575757575757576, "acc_stderr": 0.038783721137112745, "acc_norm": 0.5575757575757576, "acc_norm_stderr": 0.038783721137112745},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.28886168910648713, "mc1_stderr": 0.015866346401384308, "mc2": 0.4858897187001004, "mc2_stderr": 0.016250190279064488},
        "harness|ko_commongen_v2|2": {"acc": 0.4132231404958678, "acc_stderr": 0.01692948023449523, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.01701403811929749}
    },
    "versions": {
        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "4yo1/llama3-eng-ko-8b-sl5",
        "model_sha": "75a5820ba69a2def1bee5341d49082ee1372db27",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
4yo1/llama3-eng-ko-8b-sl6_1/result_2024-07-16 04:26:59.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.19965870307167236, "acc_stderr": 0.01168162575688869, "acc_norm": 0.26109215017064846, "acc_norm_stderr": 0.012835523909473843},
        "harness|ko_hellaswag|10": {"acc": 0.262796255725951, "acc_stderr": 0.004392531344297395, "acc_norm": 0.28809002190798644, "acc_norm_stderr": 0.004519476835646786},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.42105263157894735, "acc_stderr": 0.037867207062342145, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.037867207062342145},
        "harness|ko_mmlu_management|5": {"acc": 0.2815533980582524, "acc_stderr": 0.04453254836326466, "acc_norm": 0.2815533980582524, "acc_norm_stderr": 0.04453254836326466},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.35759897828863346, "acc_stderr": 0.017139488998803302, "acc_norm": 0.35759897828863346, "acc_norm_stderr": 0.017139488998803302},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.32592592592592595, "acc_stderr": 0.04049122041702506, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.04049122041702506},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.3574468085106383, "acc_stderr": 0.03132941789476425, "acc_norm": 0.3574468085106383, "acc_norm_stderr": 0.03132941789476425},
        "harness|ko_mmlu_virology|5": {"acc": 0.35542168674698793, "acc_stderr": 0.03726214354322415, "acc_norm": 0.35542168674698793, "acc_norm_stderr": 0.03726214354322415},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.33440514469453375, "acc_stderr": 0.026795422327893937, "acc_norm": 0.33440514469453375, "acc_norm_stderr": 0.026795422327893937},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.27802690582959644, "acc_stderr": 0.030069584874494047, "acc_norm": 0.27802690582959644, "acc_norm_stderr": 0.030069584874494047},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.3435114503816794, "acc_stderr": 0.041649760719448786, "acc_norm": 0.3435114503816794, "acc_norm_stderr": 0.041649760719448786},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.3838383838383838, "acc_stderr": 0.034648816750163375, "acc_norm": 0.3838383838383838, "acc_norm_stderr": 0.034648816750163375},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.43448275862068964, "acc_stderr": 0.041307408795554966, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.041307408795554966},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.2647058823529412, "acc_stderr": 0.04389869956808777, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.04389869956808777},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.40756302521008403, "acc_stderr": 0.03191863374478466, "acc_norm": 0.40756302521008403, "acc_norm_stderr": 0.03191863374478466},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.3333333333333333, "acc_stderr": 0.02390115797940254, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02390115797940254},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.37962962962962965, "acc_stderr": 0.04691521224077742, "acc_norm": 0.37962962962962965, "acc_norm_stderr": 0.04691521224077742},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.35960591133004927, "acc_stderr": 0.03376458246509567, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509567},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.36129032258064514, "acc_stderr": 0.027327548447957532, "acc_norm": 0.36129032258064514, "acc_norm_stderr": 0.027327548447957532},
        "harness|ko_mmlu_marketing|5": {"acc": 0.36752136752136755, "acc_stderr": 0.03158539157745636, "acc_norm": 0.36752136752136755, "acc_norm_stderr": 0.03158539157745636},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.3283018867924528, "acc_stderr": 0.028901593612411784, "acc_norm": 0.3283018867924528, "acc_norm_stderr": 0.028901593612411784},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.32727272727272727, "acc_stderr": 0.04494290866252088, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.04494290866252088},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.3148148148148148, "acc_stderr": 0.028317533496066465, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066465},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743},
        "harness|ko_mmlu_sociology|5": {"acc": 0.39800995024875624, "acc_stderr": 0.03461199429040014, "acc_norm": 0.39800995024875624, "acc_norm_stderr": 0.03461199429040014},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.31213872832369943, "acc_stderr": 0.035331333893236574, "acc_norm": 0.31213872832369943, "acc_norm_stderr": 0.035331333893236574},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.328042328042328, "acc_stderr": 0.0241804971643769, "acc_norm": 0.328042328042328, "acc_norm_stderr": 0.0241804971643769},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.32947976878612717, "acc_stderr": 0.025305258131879706, "acc_norm": 0.32947976878612717, "acc_norm_stderr": 0.025305258131879706},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.3006134969325153, "acc_stderr": 0.03602511318806771, "acc_norm": 0.3006134969325153, "acc_norm_stderr": 0.03602511318806771},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.2962962962962963, "acc_stderr": 0.02540719779889017, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.02540719779889017},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.29015544041450775, "acc_stderr": 0.03275264467791516, "acc_norm": 0.29015544041450775, "acc_norm_stderr": 0.03275264467791516},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.2631578947368421, "acc_stderr": 0.0414243971948936, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.0414243971948936},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.3339449541284404, "acc_stderr": 0.020220554196736403, "acc_norm": 0.3339449541284404, "acc_norm_stderr": 0.020220554196736403},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.2857142857142857, "acc_stderr": 0.0404061017820884, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0404061017820884},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.38235294117647056, "acc_stderr": 0.027826109307283686, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.027826109307283686},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034},
        "harness|ko_mmlu_international_law|5": {"acc": 0.5041322314049587, "acc_stderr": 0.045641987674327526, "acc_norm": 0.5041322314049587, "acc_norm_stderr": 0.045641987674327526},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.42105263157894735, "acc_stderr": 0.04017901275981748, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.04017901275981748},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.29411764705882354, "acc_stderr": 0.018433427649401896, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.018433427649401896},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3262411347517731, "acc_stderr": 0.027968453043563168, "acc_norm": 0.3262411347517731, "acc_norm_stderr": 0.027968453043563168},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.39351851851851855, "acc_stderr": 0.03331747876370312, "acc_norm": 0.39351851851851855, "acc_norm_stderr": 0.03331747876370312},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2636871508379888, "acc_stderr": 0.014736926383761983, "acc_norm": 0.2636871508379888, "acc_norm_stderr": 0.014736926383761983},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.3713235294117647, "acc_stderr": 0.02934980313976587, "acc_norm": 0.3713235294117647, "acc_norm_stderr": 0.02934980313976587},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.4448979591836735, "acc_stderr": 0.031814251181977865, "acc_norm": 0.4448979591836735, "acc_norm_stderr": 0.031814251181977865},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.33755274261603374, "acc_stderr": 0.03078154910202622, "acc_norm": 0.33755274261603374, "acc_norm_stderr": 0.03078154910202622},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.303129074315515, "acc_stderr": 0.011738669951254298, "acc_norm": 0.303129074315515, "acc_norm_stderr": 0.011738669951254298},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.3480392156862745, "acc_stderr": 0.03343311240488418, "acc_norm": 0.3480392156862745, "acc_norm_stderr": 0.03343311240488418},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.3515151515151515, "acc_stderr": 0.037282069986826503, "acc_norm": 0.3515151515151515, "acc_norm_stderr": 0.037282069986826503},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.2484700122399021, "mc1_stderr": 0.015127427096520688, "mc2": 0.4886424856035836, "mc2_stderr": 0.016537537410601783},
        "harness|ko_commongen_v2|2": {"acc": 0.12160566706021252, "acc_stderr": 0.011236640546845988, "acc_norm": 0.2644628099173554, "acc_norm_stderr": 0.015163499477892412}
    },
    "versions": {
        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "4yo1/llama3-eng-ko-8b-sl6_1",
        "model_sha": "82835f842c382a868f4c6f5fac4baa1d80c1cfab",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
4yo1/llama3-eng-ko-8b/result_2024-06-24 08:56:27.json
ADDED
@@ -0,0 +1,444 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.2525597269624573, "acc_stderr": 0.012696728980207708, "acc_norm": 0.31399317406143346, "acc_norm_stderr": 0.013562691224726304},
        "harness|ko_hellaswag|10": {"acc": 0.32752439753037244, "acc_stderr": 0.004683511716552236, "acc_norm": 0.40967934674367656, "acc_norm_stderr": 0.0049076947279356915},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.42105263157894735, "acc_stderr": 0.037867207062342145, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.037867207062342145},
        "harness|ko_mmlu_management|5": {"acc": 0.2912621359223301, "acc_stderr": 0.044986763205729224, "acc_norm": 0.2912621359223301, "acc_norm_stderr": 0.044986763205729224},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.36015325670498083, "acc_stderr": 0.0171663624713693, "acc_norm": 0.36015325670498083, "acc_norm_stderr": 0.0171663624713693},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.28888888888888886, "acc_stderr": 0.0391545063041425, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.0391545063041425},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.3659574468085106, "acc_stderr": 0.0314895582974553, "acc_norm": 0.3659574468085106, "acc_norm_stderr": 0.0314895582974553},
        "harness|ko_mmlu_virology|5": {"acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.33762057877813506, "acc_stderr": 0.026858825879488544, "acc_norm": 0.33762057877813506, "acc_norm_stderr": 0.026858825879488544},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.34977578475336324, "acc_stderr": 0.03200736719484503, "acc_norm": 0.34977578475336324, "acc_norm_stderr": 0.03200736719484503},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.32061068702290074, "acc_stderr": 0.04093329229834278, "acc_norm": 0.32061068702290074, "acc_norm_stderr": 0.04093329229834278},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.3383838383838384, "acc_stderr": 0.03371124142626303, "acc_norm": 0.3383838383838384, "acc_norm_stderr": 0.03371124142626303},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.20588235294117646, "acc_stderr": 0.04023382273617749, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617749},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.37815126050420167, "acc_stderr": 0.031499305777849054, "acc_norm": 0.37815126050420167, "acc_norm_stderr": 0.031499305777849054},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.3153846153846154, "acc_stderr": 0.02355964698318995, "acc_norm": 0.3153846153846154, "acc_norm_stderr": 0.02355964698318995},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.39814814814814814, "acc_stderr": 0.04732332615978815, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.04732332615978815},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.3399014778325123, "acc_stderr": 0.033327690684107895, "acc_norm": 0.3399014778325123, "acc_norm_stderr": 0.033327690684107895},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.38387096774193546, "acc_stderr": 0.027666182075539635, "acc_norm": 0.38387096774193546, "acc_norm_stderr": 0.027666182075539635},
        "harness|ko_mmlu_marketing|5": {"acc": 0.5598290598290598, "acc_stderr": 0.032520741720630506, "acc_norm": 0.5598290598290598, "acc_norm_stderr": 0.032520741720630506},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.39622641509433965, "acc_stderr": 0.03010279378179119, "acc_norm": 0.39622641509433965, "acc_norm_stderr": 0.03010279378179119},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.4, "acc_stderr": 0.0469237132203465, "acc_norm": 0.4, "acc_norm_stderr": 0.0469237132203465},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.34814814814814815, "acc_stderr": 0.02904560029061626, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.02904560029061626},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023},
        "harness|ko_mmlu_sociology|5": {"acc": 0.40298507462686567, "acc_stderr": 0.034683432951111266, "acc_norm": 0.40298507462686567, "acc_norm_stderr": 0.034683432951111266},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.31213872832369943, "acc_stderr": 0.035331333893236574, "acc_norm": 0.31213872832369943, "acc_norm_stderr": 0.035331333893236574},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.3253968253968254, "acc_stderr": 0.024130158299762613, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.024130158299762613},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.2847222222222222, "acc_stderr": 0.03773809990686935, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.03773809990686935},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.3352601156069364, "acc_stderr": 0.025416003773165555, "acc_norm": 0.3352601156069364, "acc_norm_stderr": 0.025416003773165555},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.3558282208588957, "acc_stderr": 0.03761521380046734, "acc_norm": 0.3558282208588957, "acc_norm_stderr": 0.03761521380046734},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.3425925925925926, "acc_stderr": 0.026406145973625672, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.026406145973625672},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.3626943005181347, "acc_stderr": 0.03469713791704372, "acc_norm": 0.3626943005181347, "acc_norm_stderr": 0.03469713791704372},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.2807017543859649, "acc_stderr": 0.04227054451232199, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.04227054451232199},
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3486238532110092,
            "acc_stderr": 0.020431254090714317,
+
"acc_norm": 0.3486238532110092,
|
247 |
+
"acc_norm_stderr": 0.020431254090714317
|
248 |
+
},
|
249 |
+
"harness|ko_mmlu_formal_logic|5": {
|
250 |
+
"acc": 0.2698412698412698,
|
251 |
+
"acc_stderr": 0.03970158273235172,
|
252 |
+
"acc_norm": 0.2698412698412698,
|
253 |
+
"acc_norm_stderr": 0.03970158273235172
|
254 |
+
},
|
255 |
+
"harness|ko_mmlu_nutrition|5": {
|
256 |
+
"acc": 0.38235294117647056,
|
257 |
+
"acc_stderr": 0.02782610930728369,
|
258 |
+
"acc_norm": 0.38235294117647056,
|
259 |
+
"acc_norm_stderr": 0.02782610930728369
|
260 |
+
},
|
261 |
+
"harness|ko_mmlu_business_ethics|5": {
|
262 |
+
"acc": 0.4,
|
263 |
+
"acc_stderr": 0.049236596391733084,
|
264 |
+
"acc_norm": 0.4,
|
265 |
+
"acc_norm_stderr": 0.049236596391733084
|
266 |
+
},
|
267 |
+
"harness|ko_mmlu_international_law|5": {
|
268 |
+
"acc": 0.5041322314049587,
|
269 |
+
"acc_stderr": 0.045641987674327526,
|
270 |
+
"acc_norm": 0.5041322314049587,
|
271 |
+
"acc_norm_stderr": 0.045641987674327526
|
272 |
+
},
|
273 |
+
"harness|ko_mmlu_astronomy|5": {
|
274 |
+
"acc": 0.2565789473684211,
|
275 |
+
"acc_stderr": 0.0355418036802569,
|
276 |
+
"acc_norm": 0.2565789473684211,
|
277 |
+
"acc_norm_stderr": 0.0355418036802569
|
278 |
+
},
|
279 |
+
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
+
"acc": 0.3137254901960784,
|
281 |
+
"acc_stderr": 0.018771683893528186,
|
282 |
+
"acc_norm": 0.3137254901960784,
|
283 |
+
"acc_norm_stderr": 0.018771683893528186
|
284 |
+
},
|
285 |
+
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
+
"acc": 0.26595744680851063,
|
287 |
+
"acc_stderr": 0.026358065698880592,
|
288 |
+
"acc_norm": 0.26595744680851063,
|
289 |
+
"acc_norm_stderr": 0.026358065698880592
|
290 |
+
},
|
291 |
+
"harness|ko_mmlu_machine_learning|5": {
|
292 |
+
"acc": 0.36607142857142855,
|
293 |
+
"acc_stderr": 0.045723723587374296,
|
294 |
+
"acc_norm": 0.36607142857142855,
|
295 |
+
"acc_norm_stderr": 0.045723723587374296
|
296 |
+
},
|
297 |
+
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
+
"acc": 0.3101851851851852,
|
299 |
+
"acc_stderr": 0.031546962856566295,
|
300 |
+
"acc_norm": 0.3101851851851852,
|
301 |
+
"acc_norm_stderr": 0.031546962856566295
|
302 |
+
},
|
303 |
+
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
+
"acc": 0.23687150837988827,
|
305 |
+
"acc_stderr": 0.014219570788103982,
|
306 |
+
"acc_norm": 0.23687150837988827,
|
307 |
+
"acc_norm_stderr": 0.014219570788103982
|
308 |
+
},
|
309 |
+
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
+
"acc": 0.32,
|
311 |
+
"acc_stderr": 0.04688261722621504,
|
312 |
+
"acc_norm": 0.32,
|
313 |
+
"acc_norm_stderr": 0.04688261722621504
|
314 |
+
},
|
315 |
+
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
+
"acc": 0.42,
|
317 |
+
"acc_stderr": 0.04960449637488584,
|
318 |
+
"acc_norm": 0.42,
|
319 |
+
"acc_norm_stderr": 0.04960449637488584
|
320 |
+
},
|
321 |
+
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
+
"acc": 0.3382352941176471,
|
323 |
+
"acc_stderr": 0.028739328513983583,
|
324 |
+
"acc_norm": 0.3382352941176471,
|
325 |
+
"acc_norm_stderr": 0.028739328513983583
|
326 |
+
},
|
327 |
+
"harness|ko_mmlu_security_studies|5": {
|
328 |
+
"acc": 0.20816326530612245,
|
329 |
+
"acc_stderr": 0.025991117672813296,
|
330 |
+
"acc_norm": 0.20816326530612245,
|
331 |
+
"acc_norm_stderr": 0.025991117672813296
|
332 |
+
},
|
333 |
+
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
+
"acc": 0.4472573839662447,
|
335 |
+
"acc_stderr": 0.03236564251614192,
|
336 |
+
"acc_norm": 0.4472573839662447,
|
337 |
+
"acc_norm_stderr": 0.03236564251614192
|
338 |
+
},
|
339 |
+
"harness|ko_mmlu_professional_law|5": {
|
340 |
+
"acc": 0.2790091264667536,
|
341 |
+
"acc_stderr": 0.011455208832803529,
|
342 |
+
"acc_norm": 0.2790091264667536,
|
343 |
+
"acc_norm_stderr": 0.011455208832803529
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.31862745098039214,
|
347 |
+
"acc_stderr": 0.0327028718148208,
|
348 |
+
"acc_norm": 0.31862745098039214,
|
349 |
+
"acc_norm_stderr": 0.0327028718148208
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.3151515151515151,
|
353 |
+
"acc_stderr": 0.0362773057502241,
|
354 |
+
"acc_norm": 0.3151515151515151,
|
355 |
+
"acc_norm_stderr": 0.0362773057502241
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.2913096695226438,
|
359 |
+
"mc1_stderr": 0.01590598704818483,
|
360 |
+
"mc2": 0.44742099912433764,
|
361 |
+
"mc2_stderr": 0.015551700567433569
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.2987012987012987,
|
365 |
+
"acc_stderr": 0.01573565739143828,
|
366 |
+
"acc_norm": 0.4085005903187721,
|
367 |
+
"acc_norm_stderr": 0.01690006287942712
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "4yo1/llama3-eng-ko-8b",
|
436 |
+
"model_sha": "e2c5c277fae6b14e4a8f6c45075f0518199d3e95",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
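Note: the sketch below is illustrative and not part of this commit. It shows one way to load a result file with the layout above and summarize it in Python; the local path "result.json" is a hypothetical placeholder, and the unweighted macro-average is an assumption on our part (the leaderboard may aggregate the subtasks differently).

import json

# Hypothetical local copy of one of the result files added in this commit.
path = "result.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Each entry under "results" is one harness task; the ko_mmlu_* tasks
# report "acc"/"acc_norm", while ko_truthfulqa_mc reports "mc1"/"mc2".
mmlu = {
    task: scores["acc_norm"]
    for task, scores in data["results"].items()
    if task.startswith("harness|ko_mmlu_")
}

# Unweighted macro-average over the ko_mmlu subtasks (an assumed
# aggregation; subtasks could instead be weighted by question count).
print("model:", data["config_general"]["model_name"])
print("ko_mmlu macro acc_norm:", sum(mmlu.values()) / len(mmlu))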
4yo1/llama3-pre1-ds-lora1/result_2024-07-18 01:07:43.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.20136518771331058,
+            "acc_stderr": 0.011718927477444265,
+            "acc_norm": 0.24914675767918087,
+            "acc_norm_stderr": 0.012639407111926437
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.26110336586337385,
+            "acc_stderr": 0.004383384784038473,
+            "acc_norm": 0.2729535949014141,
+            "acc_norm_stderr": 0.004445667638734141
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.26900584795321636,
+            "acc_stderr": 0.0340105262010409,
+            "acc_norm": 0.26900584795321636,
+            "acc_norm_stderr": 0.0340105262010409
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.24271844660194175,
+            "acc_stderr": 0.04245022486384493,
+            "acc_norm": 0.24271844660194175,
+            "acc_norm_stderr": 0.04245022486384493
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.2720306513409962,
+            "acc_stderr": 0.015913367447500517,
+            "acc_norm": 0.2720306513409962,
+            "acc_norm_stderr": 0.015913367447500517
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.34074074074074073,
+            "acc_stderr": 0.040943762699967946,
+            "acc_norm": 0.34074074074074073,
+            "acc_norm_stderr": 0.040943762699967946
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.33,
+            "acc_stderr": 0.04725815626252604,
+            "acc_norm": 0.33,
+            "acc_norm_stderr": 0.04725815626252604
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.2936170212765957,
+            "acc_stderr": 0.029771642712491227,
+            "acc_norm": 0.2936170212765957,
+            "acc_norm_stderr": 0.029771642712491227
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.25301204819277107,
+            "acc_stderr": 0.03384429155233135,
+            "acc_norm": 0.25301204819277107,
+            "acc_norm_stderr": 0.03384429155233135
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.27009646302250806,
+            "acc_stderr": 0.025218040373410622,
+            "acc_norm": 0.27009646302250806,
+            "acc_norm_stderr": 0.025218040373410622
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.25112107623318386,
+            "acc_stderr": 0.029105220833224615,
+            "acc_norm": 0.25112107623318386,
+            "acc_norm_stderr": 0.029105220833224615
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.25190839694656486,
+            "acc_stderr": 0.038073871163060866,
+            "acc_norm": 0.25190839694656486,
+            "acc_norm_stderr": 0.038073871163060866
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.18,
+            "acc_stderr": 0.03861229196653694,
+            "acc_norm": 0.18,
+            "acc_norm_stderr": 0.03861229196653694
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.22727272727272727,
+            "acc_stderr": 0.02985751567338642,
+            "acc_norm": 0.22727272727272727,
+            "acc_norm_stderr": 0.02985751567338642
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.3724137931034483,
+            "acc_stderr": 0.040287315329475604,
+            "acc_norm": 0.3724137931034483,
+            "acc_norm_stderr": 0.040287315329475604
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.13725490196078433,
+            "acc_stderr": 0.0342408466989152,
+            "acc_norm": 0.13725490196078433,
+            "acc_norm_stderr": 0.0342408466989152
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.23109243697478993,
+            "acc_stderr": 0.027381406927868966,
+            "acc_norm": 0.23109243697478993,
+            "acc_norm_stderr": 0.027381406927868966
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.2358974358974359,
+            "acc_stderr": 0.021525965407408726,
+            "acc_norm": 0.2358974358974359,
+            "acc_norm_stderr": 0.021525965407408726
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.33,
+            "acc_stderr": 0.04725815626252606,
+            "acc_norm": 0.33,
+            "acc_norm_stderr": 0.04725815626252606
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.36,
+            "acc_stderr": 0.048241815132442176,
+            "acc_norm": 0.36,
+            "acc_norm_stderr": 0.048241815132442176
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.28703703703703703,
+            "acc_stderr": 0.043733130409147614,
+            "acc_norm": 0.28703703703703703,
+            "acc_norm_stderr": 0.043733130409147614
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.33497536945812806,
+            "acc_stderr": 0.033208527423483104,
+            "acc_norm": 0.33497536945812806,
+            "acc_norm_stderr": 0.033208527423483104
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.2903225806451613,
+            "acc_stderr": 0.025822106119415898,
+            "acc_norm": 0.2903225806451613,
+            "acc_norm_stderr": 0.025822106119415898
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.27350427350427353,
+            "acc_stderr": 0.029202540153431173,
+            "acc_norm": 0.27350427350427353,
+            "acc_norm_stderr": 0.029202540153431173
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.27169811320754716,
+            "acc_stderr": 0.027377706624670713,
+            "acc_norm": 0.27169811320754716,
+            "acc_norm_stderr": 0.027377706624670713
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.23636363636363636,
+            "acc_stderr": 0.040693063197213754,
+            "acc_norm": 0.23636363636363636,
+            "acc_norm_stderr": 0.040693063197213754
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.02794045713622842,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.02794045713622842
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.24503311258278146,
+            "acc_stderr": 0.03511807571804724,
+            "acc_norm": 0.24503311258278146,
+            "acc_norm_stderr": 0.03511807571804724
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.2885572139303483,
+            "acc_stderr": 0.03203841040213321,
+            "acc_norm": 0.2885572139303483,
+            "acc_norm_stderr": 0.03203841040213321
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.23699421965317918,
+            "acc_stderr": 0.03242414757483098,
+            "acc_norm": 0.23699421965317918,
+            "acc_norm_stderr": 0.03242414757483098
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.2962962962962963,
+            "acc_stderr": 0.02351729433596329,
+            "acc_norm": 0.2962962962962963,
+            "acc_norm_stderr": 0.02351729433596329
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.22916666666666666,
+            "acc_stderr": 0.03514697467862388,
+            "acc_norm": 0.22916666666666666,
+            "acc_norm_stderr": 0.03514697467862388
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.24,
+            "acc_stderr": 0.04292346959909282,
+            "acc_norm": 0.24,
+            "acc_norm_stderr": 0.04292346959909282
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.4,
+            "acc_stderr": 0.04923659639173309,
+            "acc_norm": 0.4,
+            "acc_norm_stderr": 0.04923659639173309
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.2976878612716763,
+            "acc_stderr": 0.024617055388677,
+            "acc_norm": 0.2976878612716763,
+            "acc_norm_stderr": 0.024617055388677
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.34355828220858897,
+            "acc_stderr": 0.037311335196738925,
+            "acc_norm": 0.34355828220858897,
+            "acc_norm_stderr": 0.037311335196738925
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.32098765432098764,
+            "acc_stderr": 0.025976566010862737,
+            "acc_norm": 0.32098765432098764,
+            "acc_norm_stderr": 0.025976566010862737
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.4,
+            "acc_stderr": 0.049236596391733084,
+            "acc_norm": 0.4,
+            "acc_norm_stderr": 0.049236596391733084
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.21243523316062177,
+            "acc_stderr": 0.02951928261681725,
+            "acc_norm": 0.21243523316062177,
+            "acc_norm_stderr": 0.02951928261681725
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.21929824561403508,
+            "acc_stderr": 0.03892431106518752,
+            "acc_norm": 0.21929824561403508,
+            "acc_norm_stderr": 0.03892431106518752
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.23669724770642203,
+            "acc_stderr": 0.01822407811729908,
+            "acc_norm": 0.23669724770642203,
+            "acc_norm_stderr": 0.01822407811729908
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.23809523809523808,
+            "acc_stderr": 0.03809523809523813,
+            "acc_norm": 0.23809523809523808,
+            "acc_norm_stderr": 0.03809523809523813
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.3202614379084967,
+            "acc_stderr": 0.026716118380156834,
+            "acc_norm": 0.3202614379084967,
+            "acc_norm_stderr": 0.026716118380156834
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.29,
+            "acc_stderr": 0.04560480215720685,
+            "acc_norm": 0.29,
+            "acc_norm_stderr": 0.04560480215720685
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.39669421487603307,
+            "acc_stderr": 0.04465869780531009,
+            "acc_norm": 0.39669421487603307,
+            "acc_norm_stderr": 0.04465869780531009
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.3355263157894737,
+            "acc_stderr": 0.03842498559395269,
+            "acc_norm": 0.3355263157894737,
+            "acc_norm_stderr": 0.03842498559395269
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.2777777777777778,
+            "acc_stderr": 0.018120224251484587,
+            "acc_norm": 0.2777777777777778,
+            "acc_norm_stderr": 0.018120224251484587
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.2553191489361702,
+            "acc_stderr": 0.026011992930902002,
+            "acc_norm": 0.2553191489361702,
+            "acc_norm_stderr": 0.026011992930902002
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.32142857142857145,
+            "acc_stderr": 0.04432804055291519,
+            "acc_norm": 0.32142857142857145,
+            "acc_norm_stderr": 0.04432804055291519
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.18055555555555555,
+            "acc_stderr": 0.026232878971491656,
+            "acc_norm": 0.18055555555555555,
+            "acc_norm_stderr": 0.026232878971491656
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.25027932960893856,
+            "acc_stderr": 0.01448750085285042,
+            "acc_norm": 0.25027932960893856,
+            "acc_norm_stderr": 0.01448750085285042
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.21,
+            "acc_stderr": 0.040936018074033256,
+            "acc_norm": 0.21,
+            "acc_norm_stderr": 0.040936018074033256
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.29,
+            "acc_stderr": 0.045604802157206845,
+            "acc_norm": 0.29,
+            "acc_norm_stderr": 0.045604802157206845
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.21323529411764705,
+            "acc_stderr": 0.024880971512294254,
+            "acc_norm": 0.21323529411764705,
+            "acc_norm_stderr": 0.024880971512294254
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.23265306122448978,
+            "acc_stderr": 0.02704925791589618,
+            "acc_norm": 0.23265306122448978,
+            "acc_norm_stderr": 0.02704925791589618
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.26582278481012656,
+            "acc_stderr": 0.02875679962965834,
+            "acc_norm": 0.26582278481012656,
+            "acc_norm_stderr": 0.02875679962965834
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.26792698826597133,
+            "acc_stderr": 0.011311347690633869,
+            "acc_norm": 0.26792698826597133,
+            "acc_norm_stderr": 0.011311347690633869
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.2549019607843137,
+            "acc_stderr": 0.030587591351604246,
+            "acc_norm": 0.2549019607843137,
+            "acc_norm_stderr": 0.030587591351604246
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.2545454545454545,
+            "acc_stderr": 0.0340150671524904,
+            "acc_norm": 0.2545454545454545,
+            "acc_norm_stderr": 0.0340150671524904
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.24969400244798043,
+            "mc1_stderr": 0.015152286907148128,
+            "mc2": 0.4821689215890819,
+            "mc2_stderr": 0.016978019371229284
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.10861865407319952,
+            "acc_stderr": 0.010697906495255899,
+            "acc_norm": 0.32113341204250295,
+            "acc_norm_stderr": 0.016052762579111573
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "4yo1/llama3-pre1-ds-lora1",
+        "model_sha": "e958846fd51d3fff3221716153cbd7a2df924dd2",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
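Note: each score in these files is paired with a standard error, which makes quick significance checks easy. A normal-approximation 95% confidence interval is acc ± 1.96 × acc_stderr; the illustrative sketch below applies this to the "harness|ko_mmlu_human_aging|5" numbers from the file above (the 1.96 multiplier is the usual normal-approximation choice, an assumption on our part rather than anything the harness prescribes).

# Values copied from the "harness|ko_mmlu_human_aging|5" block above.
acc = 0.25112107623318386
acc_stderr = 0.029105220833224615

z = 1.96  # assumed normal-approximation multiplier for a 95% interval
low, high = acc - z * acc_stderr, acc + z * acc_stderr
print(f"acc = {acc:.4f}, 95% CI = ({low:.4f}, {high:.4f})")

The resulting interval, roughly (0.194, 0.308), comfortably contains the 25% random-guess baseline for four-choice questions, so this particular score is statistically indistinguishable from chance.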
4yo1/llama3-pre1-ds-lora2/result_2024-07-19 01:53:13.json
ADDED
@@ -0,0 +1,444 @@
+{
+    "results": {
+        "harness|ko_arc_challenge|25": {
+            "acc": 0.19965870307167236,
+            "acc_stderr": 0.011681625756888693,
+            "acc_norm": 0.25,
+            "acc_norm_stderr": 0.012653835621466646
+        },
+        "harness|ko_hellaswag|10": {
+            "acc": 0.255327623979287,
+            "acc_stderr": 0.0043515406039885685,
+            "acc_norm": 0.27604062935670187,
+            "acc_norm_stderr": 0.004461235175488321
+        },
+        "harness|ko_mmlu_world_religions|5": {
+            "acc": 0.30409356725146197,
+            "acc_stderr": 0.0352821125824523,
+            "acc_norm": 0.30409356725146197,
+            "acc_norm_stderr": 0.0352821125824523
+        },
+        "harness|ko_mmlu_management|5": {
+            "acc": 0.2815533980582524,
+            "acc_stderr": 0.044532548363264673,
+            "acc_norm": 0.2815533980582524,
+            "acc_norm_stderr": 0.044532548363264673
+        },
+        "harness|ko_mmlu_miscellaneous|5": {
+            "acc": 0.2822477650063857,
+            "acc_stderr": 0.016095302969878558,
+            "acc_norm": 0.2822477650063857,
+            "acc_norm_stderr": 0.016095302969878558
+        },
+        "harness|ko_mmlu_anatomy|5": {
+            "acc": 0.26666666666666666,
+            "acc_stderr": 0.038201699145179055,
+            "acc_norm": 0.26666666666666666,
+            "acc_norm_stderr": 0.038201699145179055
+        },
+        "harness|ko_mmlu_abstract_algebra|5": {
+            "acc": 0.28,
+            "acc_stderr": 0.045126085985421276,
+            "acc_norm": 0.28,
+            "acc_norm_stderr": 0.045126085985421276
+        },
+        "harness|ko_mmlu_conceptual_physics|5": {
+            "acc": 0.2978723404255319,
+            "acc_stderr": 0.029896145682095455,
+            "acc_norm": 0.2978723404255319,
+            "acc_norm_stderr": 0.029896145682095455
+        },
+        "harness|ko_mmlu_virology|5": {
+            "acc": 0.30120481927710846,
+            "acc_stderr": 0.035716092300534796,
+            "acc_norm": 0.30120481927710846,
+            "acc_norm_stderr": 0.035716092300534796
+        },
+        "harness|ko_mmlu_philosophy|5": {
+            "acc": 0.2765273311897106,
+            "acc_stderr": 0.025403832978179604,
+            "acc_norm": 0.2765273311897106,
+            "acc_norm_stderr": 0.025403832978179604
+        },
+        "harness|ko_mmlu_human_aging|5": {
+            "acc": 0.30493273542600896,
+            "acc_stderr": 0.030898610882477515,
+            "acc_norm": 0.30493273542600896,
+            "acc_norm_stderr": 0.030898610882477515
+        },
+        "harness|ko_mmlu_human_sexuality|5": {
+            "acc": 0.2824427480916031,
+            "acc_stderr": 0.03948406125768361,
+            "acc_norm": 0.2824427480916031,
+            "acc_norm_stderr": 0.03948406125768361
+        },
+        "harness|ko_mmlu_medical_genetics|5": {
+            "acc": 0.24,
+            "acc_stderr": 0.042923469599092816,
+            "acc_norm": 0.24,
+            "acc_norm_stderr": 0.042923469599092816
+        },
+        "harness|ko_mmlu_high_school_geography|5": {
+            "acc": 0.23232323232323232,
+            "acc_stderr": 0.030088629490217483,
+            "acc_norm": 0.23232323232323232,
+            "acc_norm_stderr": 0.030088629490217483
+        },
+        "harness|ko_mmlu_electrical_engineering|5": {
+            "acc": 0.3448275862068966,
+            "acc_stderr": 0.039609335494512087,
+            "acc_norm": 0.3448275862068966,
+            "acc_norm_stderr": 0.039609335494512087
+        },
+        "harness|ko_mmlu_college_physics|5": {
+            "acc": 0.20588235294117646,
+            "acc_stderr": 0.04023382273617747,
+            "acc_norm": 0.20588235294117646,
+            "acc_norm_stderr": 0.04023382273617747
+        },
+        "harness|ko_mmlu_high_school_microeconomics|5": {
+            "acc": 0.24789915966386555,
+            "acc_stderr": 0.028047967224176896,
+            "acc_norm": 0.24789915966386555,
+            "acc_norm_stderr": 0.028047967224176896
+        },
+        "harness|ko_mmlu_high_school_macroeconomics|5": {
+            "acc": 0.2692307692307692,
+            "acc_stderr": 0.022489389793654824,
+            "acc_norm": 0.2692307692307692,
+            "acc_norm_stderr": 0.022489389793654824
+        },
+        "harness|ko_mmlu_computer_security|5": {
+            "acc": 0.36,
+            "acc_stderr": 0.04824181513244218,
+            "acc_norm": 0.36,
+            "acc_norm_stderr": 0.04824181513244218
+        },
+        "harness|ko_mmlu_global_facts|5": {
+            "acc": 0.4,
+            "acc_stderr": 0.04923659639173309,
+            "acc_norm": 0.4,
+            "acc_norm_stderr": 0.04923659639173309
+        },
+        "harness|ko_mmlu_jurisprudence|5": {
+            "acc": 0.28703703703703703,
+            "acc_stderr": 0.043733130409147614,
+            "acc_norm": 0.28703703703703703,
+            "acc_norm_stderr": 0.043733130409147614
+        },
+        "harness|ko_mmlu_high_school_chemistry|5": {
+            "acc": 0.33497536945812806,
+            "acc_stderr": 0.033208527423483104,
+            "acc_norm": 0.33497536945812806,
+            "acc_norm_stderr": 0.033208527423483104
+        },
+        "harness|ko_mmlu_high_school_biology|5": {
+            "acc": 0.26129032258064516,
+            "acc_stderr": 0.024993053397764826,
+            "acc_norm": 0.26129032258064516,
+            "acc_norm_stderr": 0.024993053397764826
+        },
+        "harness|ko_mmlu_marketing|5": {
+            "acc": 0.2606837606837607,
+            "acc_stderr": 0.028760348956523414,
+            "acc_norm": 0.2606837606837607,
+            "acc_norm_stderr": 0.028760348956523414
+        },
+        "harness|ko_mmlu_clinical_knowledge|5": {
+            "acc": 0.2490566037735849,
+            "acc_stderr": 0.02661648298050171,
+            "acc_norm": 0.2490566037735849,
+            "acc_norm_stderr": 0.02661648298050171
+        },
+        "harness|ko_mmlu_public_relations|5": {
+            "acc": 0.2,
+            "acc_stderr": 0.03831305140884603,
+            "acc_norm": 0.2,
+            "acc_norm_stderr": 0.03831305140884603
+        },
+        "harness|ko_mmlu_high_school_mathematics|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.027940457136228416,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.027940457136228416
+        },
+        "harness|ko_mmlu_high_school_physics|5": {
+            "acc": 0.23841059602649006,
+            "acc_stderr": 0.03479185572599661,
+            "acc_norm": 0.23841059602649006,
+            "acc_norm_stderr": 0.03479185572599661
+        },
+        "harness|ko_mmlu_sociology|5": {
+            "acc": 0.2736318407960199,
+            "acc_stderr": 0.03152439186555402,
+            "acc_norm": 0.2736318407960199,
+            "acc_norm_stderr": 0.03152439186555402
+        },
+        "harness|ko_mmlu_college_medicine|5": {
+            "acc": 0.27167630057803466,
+            "acc_stderr": 0.03391750322321659,
+            "acc_norm": 0.27167630057803466,
+            "acc_norm_stderr": 0.03391750322321659
+        },
+        "harness|ko_mmlu_elementary_mathematics|5": {
+            "acc": 0.29365079365079366,
+            "acc_stderr": 0.023456037383982026,
+            "acc_norm": 0.29365079365079366,
+            "acc_norm_stderr": 0.023456037383982026
+        },
+        "harness|ko_mmlu_college_biology|5": {
+            "acc": 0.1875,
+            "acc_stderr": 0.032639560491693344,
+            "acc_norm": 0.1875,
+            "acc_norm_stderr": 0.032639560491693344
+        },
+        "harness|ko_mmlu_college_chemistry|5": {
+            "acc": 0.24,
+            "acc_stderr": 0.042923469599092816,
+            "acc_norm": 0.24,
+            "acc_norm_stderr": 0.042923469599092816
+        },
+        "harness|ko_mmlu_us_foreign_policy|5": {
+            "acc": 0.31,
+            "acc_stderr": 0.04648231987117316,
+            "acc_norm": 0.31,
+            "acc_norm_stderr": 0.04648231987117316
+        },
+        "harness|ko_mmlu_moral_disputes|5": {
+            "acc": 0.30346820809248554,
+            "acc_stderr": 0.024752411960917212,
+            "acc_norm": 0.30346820809248554,
+            "acc_norm_stderr": 0.024752411960917212
+        },
+        "harness|ko_mmlu_logical_fallacies|5": {
+            "acc": 0.32515337423312884,
+            "acc_stderr": 0.036803503712864595,
+            "acc_norm": 0.32515337423312884,
+            "acc_norm_stderr": 0.036803503712864595
+        },
+        "harness|ko_mmlu_prehistory|5": {
+            "acc": 0.24691358024691357,
+            "acc_stderr": 0.02399350170904212,
+            "acc_norm": 0.24691358024691357,
+            "acc_norm_stderr": 0.02399350170904212
+        },
+        "harness|ko_mmlu_college_mathematics|5": {
+            "acc": 0.35,
+            "acc_stderr": 0.047937248544110196,
+            "acc_norm": 0.35,
+            "acc_norm_stderr": 0.047937248544110196
+        },
+        "harness|ko_mmlu_high_school_government_and_politics|5": {
+            "acc": 0.21761658031088082,
+            "acc_stderr": 0.029778663037752954,
+            "acc_norm": 0.21761658031088082,
+            "acc_norm_stderr": 0.029778663037752954
+        },
+        "harness|ko_mmlu_econometrics|5": {
+            "acc": 0.24561403508771928,
+            "acc_stderr": 0.040493392977481404,
+            "acc_norm": 0.24561403508771928,
+            "acc_norm_stderr": 0.040493392977481404
+        },
+        "harness|ko_mmlu_high_school_psychology|5": {
+            "acc": 0.21651376146788992,
+            "acc_stderr": 0.01765871059444313,
+            "acc_norm": 0.21651376146788992,
+            "acc_norm_stderr": 0.01765871059444313
+        },
+        "harness|ko_mmlu_formal_logic|5": {
+            "acc": 0.24603174603174602,
+            "acc_stderr": 0.03852273364924315,
+            "acc_norm": 0.24603174603174602,
+            "acc_norm_stderr": 0.03852273364924315
+        },
+        "harness|ko_mmlu_nutrition|5": {
+            "acc": 0.27450980392156865,
+            "acc_stderr": 0.02555316999182651,
+            "acc_norm": 0.27450980392156865,
+            "acc_norm_stderr": 0.02555316999182651
+        },
+        "harness|ko_mmlu_business_ethics|5": {
+            "acc": 0.25,
+            "acc_stderr": 0.04351941398892446,
+            "acc_norm": 0.25,
+            "acc_norm_stderr": 0.04351941398892446
+        },
+        "harness|ko_mmlu_international_law|5": {
+            "acc": 0.3140495867768595,
+            "acc_stderr": 0.04236964753041018,
+            "acc_norm": 0.3140495867768595,
+            "acc_norm_stderr": 0.04236964753041018
+        },
+        "harness|ko_mmlu_astronomy|5": {
+            "acc": 0.29605263157894735,
+            "acc_stderr": 0.03715062154998904,
+            "acc_norm": 0.29605263157894735,
+            "acc_norm_stderr": 0.03715062154998904
+        },
+        "harness|ko_mmlu_professional_psychology|5": {
+            "acc": 0.2777777777777778,
+            "acc_stderr": 0.018120224251484598,
+            "acc_norm": 0.2777777777777778,
+            "acc_norm_stderr": 0.018120224251484598
+        },
+        "harness|ko_mmlu_professional_accounting|5": {
+            "acc": 0.2695035460992908,
+            "acc_stderr": 0.026469036818590634,
+            "acc_norm": 0.2695035460992908,
+            "acc_norm_stderr": 0.026469036818590634
+        },
+        "harness|ko_mmlu_machine_learning|5": {
+            "acc": 0.36607142857142855,
+            "acc_stderr": 0.0457237235873743,
+            "acc_norm": 0.36607142857142855,
+            "acc_norm_stderr": 0.0457237235873743
+        },
+        "harness|ko_mmlu_high_school_statistics|5": {
+            "acc": 0.3194444444444444,
+            "acc_stderr": 0.031798763421768524,
+            "acc_norm": 0.3194444444444444,
+            "acc_norm_stderr": 0.031798763421768524
+        },
+        "harness|ko_mmlu_moral_scenarios|5": {
+            "acc": 0.24916201117318434,
+            "acc_stderr": 0.014465893829859926,
+            "acc_norm": 0.24916201117318434,
+            "acc_norm_stderr": 0.014465893829859926
+        },
+        "harness|ko_mmlu_college_computer_science|5": {
+            "acc": 0.3,
+            "acc_stderr": 0.046056618647183814,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.046056618647183814
+        },
+        "harness|ko_mmlu_high_school_computer_science|5": {
+            "acc": 0.29,
+            "acc_stderr": 0.045604802157206845,
+            "acc_norm": 0.29,
+            "acc_norm_stderr": 0.045604802157206845
+        },
+        "harness|ko_mmlu_professional_medicine|5": {
+            "acc": 0.21691176470588236,
+            "acc_stderr": 0.02503584522771126,
+            "acc_norm": 0.21691176470588236,
+            "acc_norm_stderr": 0.02503584522771126
+        },
+        "harness|ko_mmlu_security_studies|5": {
+            "acc": 0.2163265306122449,
+            "acc_stderr": 0.026358916334904024,
+            "acc_norm": 0.2163265306122449,
+            "acc_norm_stderr": 0.026358916334904024
+        },
+        "harness|ko_mmlu_high_school_world_history|5": {
+            "acc": 0.2489451476793249,
+            "acc_stderr": 0.028146970599422644,
+            "acc_norm": 0.2489451476793249,
+            "acc_norm_stderr": 0.028146970599422644
+        },
+        "harness|ko_mmlu_professional_law|5": {
+            "acc": 0.2646675358539765,
+            "acc_stderr": 0.011267332992845524,
+            "acc_norm": 0.2646675358539765,
+            "acc_norm_stderr": 0.011267332992845524
+        },
+        "harness|ko_mmlu_high_school_us_history|5": {
+            "acc": 0.2549019607843137,
+            "acc_stderr": 0.030587591351604246,
+            "acc_norm": 0.2549019607843137,
+            "acc_norm_stderr": 0.030587591351604246
+        },
+        "harness|ko_mmlu_high_school_european_history|5": {
+            "acc": 0.2545454545454545,
+            "acc_stderr": 0.0340150671524904,
+            "acc_norm": 0.2545454545454545,
+            "acc_norm_stderr": 0.0340150671524904
+        },
+        "harness|ko_truthfulqa_mc|0": {
+            "mc1": 0.2484700122399021,
+            "mc1_stderr": 0.015127427096520688,
+            "mc2": 0.48623344584189665,
+            "mc2_stderr": 0.016862674875056858
+        },
+        "harness|ko_commongen_v2|2": {
+            "acc": 0.12514757969303425,
+            "acc_stderr": 0.011376101146401418,
+            "acc_norm": 0.21959858323494688,
+            "acc_norm_stderr": 0.014232743085580252
+        }
+    },
+    "versions": {
+        "all": 0,
+        "harness|ko_arc_challenge|25": 0,
+        "harness|ko_hellaswag|10": 0,
+        "harness|ko_mmlu_world_religions|5": 1,
+        "harness|ko_mmlu_management|5": 1,
+        "harness|ko_mmlu_miscellaneous|5": 1,
+        "harness|ko_mmlu_anatomy|5": 1,
+        "harness|ko_mmlu_abstract_algebra|5": 1,
+        "harness|ko_mmlu_conceptual_physics|5": 1,
+        "harness|ko_mmlu_virology|5": 1,
+        "harness|ko_mmlu_philosophy|5": 1,
+        "harness|ko_mmlu_human_aging|5": 1,
+        "harness|ko_mmlu_human_sexuality|5": 1,
+        "harness|ko_mmlu_medical_genetics|5": 1,
+        "harness|ko_mmlu_high_school_geography|5": 1,
+        "harness|ko_mmlu_electrical_engineering|5": 1,
+        "harness|ko_mmlu_college_physics|5": 1,
+        "harness|ko_mmlu_high_school_microeconomics|5": 1,
+        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
+        "harness|ko_mmlu_computer_security|5": 1,
+        "harness|ko_mmlu_global_facts|5": 1,
+        "harness|ko_mmlu_jurisprudence|5": 1,
+        "harness|ko_mmlu_high_school_chemistry|5": 1,
+        "harness|ko_mmlu_high_school_biology|5": 1,
+        "harness|ko_mmlu_marketing|5": 1,
+        "harness|ko_mmlu_clinical_knowledge|5": 1,
+        "harness|ko_mmlu_public_relations|5": 1,
+        "harness|ko_mmlu_high_school_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_physics|5": 1,
+        "harness|ko_mmlu_sociology|5": 1,
+        "harness|ko_mmlu_college_medicine|5": 1,
+        "harness|ko_mmlu_elementary_mathematics|5": 1,
+        "harness|ko_mmlu_college_biology|5": 1,
+        "harness|ko_mmlu_college_chemistry|5": 1,
+        "harness|ko_mmlu_us_foreign_policy|5": 1,
+        "harness|ko_mmlu_moral_disputes|5": 1,
+        "harness|ko_mmlu_logical_fallacies|5": 1,
+        "harness|ko_mmlu_prehistory|5": 1,
+        "harness|ko_mmlu_college_mathematics|5": 1,
+        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
+        "harness|ko_mmlu_econometrics|5": 1,
+        "harness|ko_mmlu_high_school_psychology|5": 1,
+        "harness|ko_mmlu_formal_logic|5": 1,
+        "harness|ko_mmlu_nutrition|5": 1,
+        "harness|ko_mmlu_business_ethics|5": 1,
+        "harness|ko_mmlu_international_law|5": 1,
+        "harness|ko_mmlu_astronomy|5": 1,
+        "harness|ko_mmlu_professional_psychology|5": 1,
+        "harness|ko_mmlu_professional_accounting|5": 1,
+        "harness|ko_mmlu_machine_learning|5": 1,
+        "harness|ko_mmlu_high_school_statistics|5": 1,
+        "harness|ko_mmlu_moral_scenarios|5": 1,
+        "harness|ko_mmlu_college_computer_science|5": 1,
+        "harness|ko_mmlu_high_school_computer_science|5": 1,
+        "harness|ko_mmlu_professional_medicine|5": 1,
+        "harness|ko_mmlu_security_studies|5": 1,
+        "harness|ko_mmlu_high_school_world_history|5": 1,
+        "harness|ko_mmlu_professional_law|5": 1,
+        "harness|ko_mmlu_high_school_us_history|5": 1,
+        "harness|ko_mmlu_high_school_european_history|5": 1,
+        "harness|ko_truthfulqa_mc|0": 0,
+        "harness|ko_commongen_v2|2": 1
+    },
+    "config_general": {
+        "model_name": "4yo1/llama3-pre1-ds-lora2",
+        "model_sha": "852b1091a5bbda40c7013948b1f8ec4094844456",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
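Note: because llama3-pre1-ds-lora1 and llama3-pre1-ds-lora2 were evaluated on the same task set a day apart, their result files can be compared score by score. The sketch below is illustrative only; it assumes both JSON files from this commit are available locally under the paths shown above.

import json

def load_results(path: str) -> dict:
    """Return the task -> metrics mapping from one result file."""
    with open(path, encoding="utf-8") as f:
        return json.load(f)["results"]

before = load_results("4yo1/llama3-pre1-ds-lora1/result_2024-07-18 01:07:43.json")
after = load_results("4yo1/llama3-pre1-ds-lora2/result_2024-07-19 01:53:13.json")

# Per-task acc_norm deltas between the two checkpoints, largest movers first.
deltas = {
    task: after[task]["acc_norm"] - before[task]["acc_norm"]
    for task in before
    if "acc_norm" in before[task] and task in after
}
for task, delta in sorted(deltas.items(), key=lambda kv: -abs(kv[1]))[:10]:
    print(f"{delta:+.4f}  {task}")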
4yo1/llama3-pre1-ds-lora3/result_2024-07-23 07:30:22.json
ADDED
@@ -0,0 +1,444 @@
1 |
+
{
|
2 |
+
"results": {
|
3 |
+
"harness|ko_arc_challenge|25": {
|
4 |
+
"acc": 0.2568259385665529,
|
5 |
+
"acc_stderr": 0.012766923794116801,
|
6 |
+
"acc_norm": 0.3097269624573379,
|
7 |
+
"acc_norm_stderr": 0.013512058415238363
|
8 |
+
},
|
9 |
+
"harness|ko_hellaswag|10": {
|
10 |
+
"acc": 0.2590121489743079,
|
11 |
+
"acc_stderr": 0.004371969542814558,
|
12 |
+
"acc_norm": 0.27106154152559253,
|
13 |
+
"acc_norm_stderr": 0.0044359934925838835
|
14 |
+
},
|
15 |
+
"harness|ko_mmlu_world_religions|5": {
|
16 |
+
"acc": 0.5497076023391813,
|
17 |
+
"acc_stderr": 0.03815827365913237,
|
18 |
+
"acc_norm": 0.5497076023391813,
|
19 |
+
"acc_norm_stderr": 0.03815827365913237
|
20 |
+
},
|
21 |
+
"harness|ko_mmlu_management|5": {
|
22 |
+
"acc": 0.5048543689320388,
|
23 |
+
"acc_stderr": 0.049505043821289195,
|
24 |
+
"acc_norm": 0.5048543689320388,
|
25 |
+
"acc_norm_stderr": 0.049505043821289195
|
26 |
+
},
|
27 |
+
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
+
"acc": 0.48659003831417624,
|
29 |
+
"acc_stderr": 0.01787353173651038,
|
30 |
+
"acc_norm": 0.48659003831417624,
|
31 |
+
"acc_norm_stderr": 0.01787353173651038
|
32 |
+
},
|
33 |
+
"harness|ko_mmlu_anatomy|5": {
|
34 |
+
"acc": 0.43703703703703706,
|
35 |
+
"acc_stderr": 0.04284958639753399,
|
36 |
+
"acc_norm": 0.43703703703703706,
|
37 |
+
"acc_norm_stderr": 0.04284958639753399
|
38 |
+
},
|
39 |
+
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
+
"acc": 0.36,
|
41 |
+
"acc_stderr": 0.04824181513244218,
|
42 |
+
"acc_norm": 0.36,
|
43 |
+
"acc_norm_stderr": 0.04824181513244218
|
44 |
+
},
|
45 |
+
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
+
"acc": 0.3872340425531915,
|
47 |
+
"acc_stderr": 0.03184389265339526,
|
48 |
+
"acc_norm": 0.3872340425531915,
|
49 |
+
"acc_norm_stderr": 0.03184389265339526
|
50 |
+
},
|
51 |
+
"harness|ko_mmlu_virology|5": {
|
52 |
+
"acc": 0.42771084337349397,
|
53 |
+
"acc_stderr": 0.038515976837185335,
|
54 |
+
"acc_norm": 0.42771084337349397,
|
55 |
+
"acc_norm_stderr": 0.038515976837185335
|
56 |
+
},
|
57 |
+
"harness|ko_mmlu_philosophy|5": {
|
58 |
+
"acc": 0.45016077170418006,
|
59 |
+
"acc_stderr": 0.028256660723360177,
|
60 |
+
"acc_norm": 0.45016077170418006,
|
61 |
+
"acc_norm_stderr": 0.028256660723360177
|
62 |
+
},
|
63 |
+
"harness|ko_mmlu_human_aging|5": {
|
64 |
+
"acc": 0.4080717488789238,
|
65 |
+
"acc_stderr": 0.03298574607842822,
|
66 |
+
"acc_norm": 0.4080717488789238,
|
67 |
+
"acc_norm_stderr": 0.03298574607842822
|
68 |
+
},
|
69 |
+
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
+
"acc": 0.5190839694656488,
|
71 |
+
"acc_stderr": 0.04382094705550988,
|
72 |
+
"acc_norm": 0.5190839694656488,
|
73 |
+
"acc_norm_stderr": 0.04382094705550988
|
74 |
+
},
|
75 |
+
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
+
"acc": 0.38,
|
77 |
+
"acc_stderr": 0.048783173121456316,
|
78 |
+
"acc_norm": 0.38,
|
79 |
+
"acc_norm_stderr": 0.048783173121456316
|
80 |
+
},
|
81 |
+
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
+
"acc": 0.4494949494949495,
|
83 |
+
"acc_stderr": 0.03544132491947969,
|
84 |
+
"acc_norm": 0.4494949494949495,
|
85 |
+
"acc_norm_stderr": 0.03544132491947969
|
86 |
+
},
|
87 |
+
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
+
"acc": 0.4689655172413793,
|
89 |
+
"acc_stderr": 0.04158632762097828,
|
90 |
+
"acc_norm": 0.4689655172413793,
|
91 |
+
"acc_norm_stderr": 0.04158632762097828
|
92 |
+
},
|
93 |
+
"harness|ko_mmlu_college_physics|5": {
|
94 |
+
"acc": 0.30392156862745096,
|
95 |
+
"acc_stderr": 0.04576665403207762,
|
96 |
+
"acc_norm": 0.30392156862745096,
|
97 |
+
"acc_norm_stderr": 0.04576665403207762
|
98 |
+
},
|
99 |
+
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
+
"acc": 0.41596638655462187,
|
101 |
+
"acc_stderr": 0.03201650100739615,
|
102 |
+
"acc_norm": 0.41596638655462187,
|
103 |
+
"acc_norm_stderr": 0.03201650100739615
|
104 |
+
},
|
105 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
+
"acc": 0.358974358974359,
|
107 |
+
"acc_stderr": 0.024321738484602364,
|
108 |
+
"acc_norm": 0.358974358974359,
|
109 |
+
"acc_norm_stderr": 0.024321738484602364
|
110 |
+
},
|
111 |
+
"harness|ko_mmlu_computer_security|5": {
|
112 |
+
"acc": 0.59,
|
113 |
+
"acc_stderr": 0.049431107042371025,
|
114 |
+
"acc_norm": 0.59,
|
115 |
+
"acc_norm_stderr": 0.049431107042371025
|
116 |
+
},
|
117 |
+
"harness|ko_mmlu_global_facts|5": {
|
118 |
+
"acc": 0.31,
|
119 |
+
"acc_stderr": 0.04648231987117316,
|
120 |
+
"acc_norm": 0.31,
|
121 |
+
"acc_norm_stderr": 0.04648231987117316
|
122 |
+
},
|
123 |
+
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
+
"acc": 0.39814814814814814,
|
125 |
+
"acc_stderr": 0.04732332615978814,
|
126 |
+
"acc_norm": 0.39814814814814814,
|
127 |
+
"acc_norm_stderr": 0.04732332615978814
|
128 |
+
},
|
129 |
+
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
+
"acc": 0.42857142857142855,
|
131 |
+
"acc_stderr": 0.034819048444388045,
|
132 |
+
"acc_norm": 0.42857142857142855,
|
133 |
+
"acc_norm_stderr": 0.034819048444388045
|
134 |
+
},
|
135 |
+
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
+
"acc": 0.4774193548387097,
|
137 |
+
"acc_stderr": 0.028414985019707868,
|
138 |
+
"acc_norm": 0.4774193548387097,
|
139 |
+
"acc_norm_stderr": 0.028414985019707868
|
140 |
+
},
|
141 |
+
"harness|ko_mmlu_marketing|5": {
|
142 |
+
"acc": 0.6538461538461539,
|
143 |
+
"acc_stderr": 0.0311669573672359,
|
144 |
+
"acc_norm": 0.6538461538461539,
|
145 |
+
"acc_norm_stderr": 0.0311669573672359
|
146 |
+
},
|
147 |
+
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
+
"acc": 0.39245283018867927,
|
149 |
+
"acc_stderr": 0.03005258057955784,
|
150 |
+
"acc_norm": 0.39245283018867927,
|
151 |
+
"acc_norm_stderr": 0.03005258057955784
|
152 |
+
},
|
153 |
+
"harness|ko_mmlu_public_relations|5": {
|
154 |
+
"acc": 0.4636363636363636,
|
155 |
+
"acc_stderr": 0.047764491623961985,
|
156 |
+
"acc_norm": 0.4636363636363636,
|
157 |
+
"acc_norm_stderr": 0.047764491623961985
|
158 |
+
},
|
159 |
+
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
+
"acc": 0.3,
|
161 |
+
"acc_stderr": 0.027940457136228412,
|
162 |
+
"acc_norm": 0.3,
|
163 |
+
"acc_norm_stderr": 0.027940457136228412
|
164 |
+
},
|
165 |
+
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
+
"acc": 0.31125827814569534,
|
167 |
+
"acc_stderr": 0.03780445850526732,
|
168 |
+
"acc_norm": 0.31125827814569534,
|
169 |
+
"acc_norm_stderr": 0.03780445850526732
|
170 |
+
},
|
171 |
+
"harness|ko_mmlu_sociology|5": {
|
172 |
+
"acc": 0.6019900497512438,
|
173 |
+
"acc_stderr": 0.034611994290400135,
|
174 |
+
"acc_norm": 0.6019900497512438,
|
175 |
+
"acc_norm_stderr": 0.034611994290400135
|
176 |
+
},
|
177 |
+
"harness|ko_mmlu_college_medicine|5": {
|
178 |
+
"acc": 0.3988439306358382,
|
179 |
+
"acc_stderr": 0.03733626655383509,
|
180 |
+
"acc_norm": 0.3988439306358382,
|
181 |
+
"acc_norm_stderr": 0.03733626655383509
|
182 |
+
},
|
183 |
+
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
+
"acc": 0.30423280423280424,
|
185 |
+
"acc_stderr": 0.023695415009463087,
|
186 |
+
"acc_norm": 0.30423280423280424,
|
187 |
+
"acc_norm_stderr": 0.023695415009463087
|
188 |
+
},
|
189 |
+
"harness|ko_mmlu_college_biology|5": {
|
190 |
+
"acc": 0.3472222222222222,
|
191 |
+
"acc_stderr": 0.0398124054371786,
|
192 |
+
"acc_norm": 0.3472222222222222,
|
193 |
+
"acc_norm_stderr": 0.0398124054371786
|
194 |
+
},
|
195 |
+
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
+
"acc": 0.3,
|
197 |
+
"acc_stderr": 0.046056618647183814,
|
198 |
+
"acc_norm": 0.3,
|
199 |
+
"acc_norm_stderr": 0.046056618647183814
|
200 |
+
},
|
201 |
+
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
+
"acc": 0.63,
|
203 |
+
"acc_stderr": 0.048523658709391,
|
204 |
+
"acc_norm": 0.63,
|
205 |
+
"acc_norm_stderr": 0.048523658709391
|
206 |
+
},
|
207 |
+
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
+
"acc": 0.3988439306358382,
|
209 |
+
"acc_stderr": 0.026362437574546545,
|
210 |
+
"acc_norm": 0.3988439306358382,
|
211 |
+
"acc_norm_stderr": 0.026362437574546545
|
212 |
+
},
|
213 |
+
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
+
"acc": 0.3619631901840491,
|
215 |
+
"acc_stderr": 0.037757007291414416,
|
216 |
+
"acc_norm": 0.3619631901840491,
|
217 |
+
"acc_norm_stderr": 0.037757007291414416
|
218 |
+
},
|
219 |
+
"harness|ko_mmlu_prehistory|5": {
|
220 |
+
"acc": 0.4382716049382716,
|
221 |
+
"acc_stderr": 0.027607914087400463,
|
222 |
+
"acc_norm": 0.4382716049382716,
|
223 |
+
"acc_norm_stderr": 0.027607914087400463
|
224 |
+
},
|
225 |
+
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
+
"acc": 0.33,
|
227 |
+
"acc_stderr": 0.04725815626252604,
|
228 |
+
"acc_norm": 0.33,
|
229 |
+
"acc_norm_stderr": 0.04725815626252604
|
230 |
+
},
|
231 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
+
"acc": 0.37305699481865284,
|
233 |
+
"acc_stderr": 0.034902055920485744,
|
234 |
+
"acc_norm": 0.37305699481865284,
|
235 |
+
"acc_norm_stderr": 0.034902055920485744
|
236 |
+
},
|
237 |
+
"harness|ko_mmlu_econometrics|5": {
|
238 |
+
"acc": 0.2894736842105263,
|
239 |
+
"acc_stderr": 0.04266339443159394,
|
240 |
+
"acc_norm": 0.2894736842105263,
|
241 |
+
"acc_norm_stderr": 0.04266339443159394
|
242 |
+
},
|
243 |
+
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
+
"acc": 0.46055045871559636,
|
245 |
+
"acc_stderr": 0.021370494609995093,
|
246 |
+
"acc_norm": 0.46055045871559636,
|
247 |
+
"acc_norm_stderr": 0.021370494609995093
|
248 |
+
},
|
249 |
+
"harness|ko_mmlu_formal_logic|5": {
|
250 |
+
"acc": 0.2698412698412698,
|
251 |
+
"acc_stderr": 0.03970158273235173,
|
252 |
+
"acc_norm": 0.2698412698412698,
|
253 |
+
"acc_norm_stderr": 0.03970158273235173
|
254 |
+
},
|
255 |
+
"harness|ko_mmlu_nutrition|5": {
|
256 |
+
"acc": 0.43137254901960786,
|
257 |
+
"acc_stderr": 0.028358956313423552,
|
258 |
+
"acc_norm": 0.43137254901960786,
|
259 |
+
"acc_norm_stderr": 0.028358956313423552
|
260 |
+
},
|
261 |
+
"harness|ko_mmlu_business_ethics|5": {
|
262 |
+
"acc": 0.38,
|
263 |
+
"acc_stderr": 0.048783173121456316,
|
264 |
+
"acc_norm": 0.38,
|
265 |
+
"acc_norm_stderr": 0.048783173121456316
|
266 |
+
},
|
267 |
+
"harness|ko_mmlu_international_law|5": {
|
268 |
+
"acc": 0.5950413223140496,
|
269 |
+
"acc_stderr": 0.04481137755942469,
|
270 |
+
"acc_norm": 0.5950413223140496,
|
271 |
+
"acc_norm_stderr": 0.04481137755942469
|
272 |
+
},
|
273 |
+
"harness|ko_mmlu_astronomy|5": {
|
274 |
+
"acc": 0.47368421052631576,
|
275 |
+
"acc_stderr": 0.04063302731486671,
|
276 |
+
"acc_norm": 0.47368421052631576,
|
277 |
+
"acc_norm_stderr": 0.04063302731486671
|
278 |
+
},
|
279 |
+
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
+
"acc": 0.3545751633986928,
|
281 |
+
"acc_stderr": 0.01935336054755369,
|
282 |
+
"acc_norm": 0.3545751633986928,
|
283 |
+
"acc_norm_stderr": 0.01935336054755369
|
284 |
+
},
|
285 |
+
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
+
"acc": 0.32269503546099293,
|
287 |
+
"acc_stderr": 0.027889139300534802,
|
288 |
+
"acc_norm": 0.32269503546099293,
|
289 |
+
"acc_norm_stderr": 0.027889139300534802
|
290 |
+
},
|
291 |
+
"harness|ko_mmlu_machine_learning|5": {
|
292 |
+
"acc": 0.39285714285714285,
|
293 |
+
"acc_stderr": 0.04635550135609976,
|
294 |
+
"acc_norm": 0.39285714285714285,
|
295 |
+
"acc_norm_stderr": 0.04635550135609976
|
296 |
+
},
|
297 |
+
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
+
"acc": 0.3472222222222222,
|
299 |
+
"acc_stderr": 0.032468872436376486,
|
300 |
+
"acc_norm": 0.3472222222222222,
|
301 |
+
"acc_norm_stderr": 0.032468872436376486
|
302 |
+
},
|
303 |
+
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
+
"acc": 0.24692737430167597,
|
305 |
+
"acc_stderr": 0.014422292204808852,
|
306 |
+
"acc_norm": 0.24692737430167597,
|
307 |
+
"acc_norm_stderr": 0.014422292204808852
|
308 |
+
},
|
309 |
+
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
+
"acc": 0.31,
|
311 |
+
"acc_stderr": 0.04648231987117316,
|
312 |
+
"acc_norm": 0.31,
|
313 |
+
"acc_norm_stderr": 0.04648231987117316
|
314 |
+
},
|
315 |
+
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
+
"acc": 0.44,
|
317 |
+
"acc_stderr": 0.04988876515698589,
|
318 |
+
"acc_norm": 0.44,
|
319 |
+
"acc_norm_stderr": 0.04988876515698589
|
320 |
+
},
|
321 |
+
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
+
"acc": 0.3235294117647059,
|
323 |
+
"acc_stderr": 0.02841820861940679,
|
324 |
+
"acc_norm": 0.3235294117647059,
|
325 |
+
"acc_norm_stderr": 0.02841820861940679
|
326 |
+
},
|
327 |
+
"harness|ko_mmlu_security_studies|5": {
|
328 |
+
"acc": 0.4530612244897959,
|
329 |
+
"acc_stderr": 0.03186785930004129,
|
330 |
+
"acc_norm": 0.4530612244897959,
|
331 |
+
"acc_norm_stderr": 0.03186785930004129
|
332 |
+
},
|
333 |
+
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
+
"acc": 0.5147679324894515,
|
335 |
+
"acc_stderr": 0.03253302807877738,
|
336 |
+
"acc_norm": 0.5147679324894515,
|
337 |
+
"acc_norm_stderr": 0.03253302807877738
|
338 |
+
},
|
339 |
+
"harness|ko_mmlu_professional_law|5": {
|
340 |
+
"acc": 0.29986962190352023,
|
341 |
+
"acc_stderr": 0.011702660860193975,
|
342 |
+
"acc_norm": 0.29986962190352023,
|
343 |
+
"acc_norm_stderr": 0.011702660860193975
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.39705882352941174,
|
347 |
+
"acc_stderr": 0.034341311647191286,
|
348 |
+
"acc_norm": 0.39705882352941174,
|
349 |
+
"acc_norm_stderr": 0.034341311647191286
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.40606060606060607,
|
353 |
+
"acc_stderr": 0.03834816355401181,
|
354 |
+
"acc_norm": 0.40606060606060607,
|
355 |
+
"acc_norm_stderr": 0.03834816355401181
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.25091799265605874,
|
359 |
+
"mc1_stderr": 0.015176985027707687,
|
360 |
+
"mc2": 0.49534110195918407,
|
361 |
+
"mc2_stderr": 0.017080132275211678
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.09445100354191263,
|
365 |
+
"acc_stderr": 0.010054814982894204,
|
366 |
+
"acc_norm": 0.35064935064935066,
|
367 |
+
"acc_norm_stderr": 0.016405556903893295
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "4yo1/llama3-pre1-ds-lora3",
|
436 |
+
"model_sha": "6749d7d4761a167f1c160a7d0b9e746f3cb04aab",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
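Each backup file in this commit follows the same lighteval-style schema: a "results" map keyed by "harness|<task>|<n_fewshot>", a "versions" map, and a "config_general" block. A minimal sketch of how one of these files could be consumed downstream, assuming a local copy saved as result.json (a placeholder name, not a file in this commit):

import json

# Load one backup result file (hypothetical local copy of any file added here,
# e.g. "4yo1/llama3-pre1-ds-lora3/result_2024-07-23 07:30:22.json").
with open("result.json", encoding="utf-8") as f:
    data = json.load(f)

# Keep only the 57 five-shot ko_mmlu subtasks; ko_arc_challenge, ko_hellaswag,
# ko_truthfulqa_mc (mc1/mc2), and ko_commongen_v2 are reported separately.
mmlu = {name: scores for name, scores in data["results"].items()
        if name.startswith("harness|ko_mmlu_")}
avg_acc_norm = sum(s["acc_norm"] for s in mmlu.values()) / len(mmlu)

print(f"{data['config_general']['model_name']}: "
      f"mean ko_mmlu acc_norm = {avg_acc_norm:.4f} over {len(mmlu)} subtasks")

Note that in these files acc and acc_norm coincide for the multiple-choice ko_mmlu subtasks, so averaging either field gives the same number; how the leaderboard itself aggregates scores is not specified in this commit.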
4yo1/llama3-pre1-pre2-ds-ins2-lora3/result_2024-07-31 06:39:29.json
ADDED
@@ -0,0 +1,444 @@
+{
+  "results": {
+    "harness|ko_arc_challenge|25": { "acc": 0.20392491467576793, "acc_stderr": 0.011774262478702254, "acc_norm": 0.2551194539249147, "acc_norm_stderr": 0.012739038695202102 },
+    "harness|ko_hellaswag|10": { "acc": 0.2531368253335989, "acc_stderr": 0.0043392003634544945, "acc_norm": 0.2502489543915555, "acc_norm_stderr": 0.004322710911026373 },
+    "harness|ko_mmlu_world_religions|5": { "acc": 0.391812865497076, "acc_stderr": 0.03743979825926401, "acc_norm": 0.391812865497076, "acc_norm_stderr": 0.03743979825926401 },
+    "harness|ko_mmlu_management|5": { "acc": 0.2815533980582524, "acc_stderr": 0.044532548363264673, "acc_norm": 0.2815533980582524, "acc_norm_stderr": 0.044532548363264673 },
+    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.30779054916985954, "acc_stderr": 0.016506045045155633, "acc_norm": 0.30779054916985954, "acc_norm_stderr": 0.016506045045155633 },
+    "harness|ko_mmlu_anatomy|5": { "acc": 0.37777777777777777, "acc_stderr": 0.04188307537595853, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.04188307537595853 },
+    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 },
+    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2425531914893617, "acc_stderr": 0.028020226271200217, "acc_norm": 0.2425531914893617, "acc_norm_stderr": 0.028020226271200217 },
+    "harness|ko_mmlu_virology|5": { "acc": 0.3072289156626506, "acc_stderr": 0.035915667978246635, "acc_norm": 0.3072289156626506, "acc_norm_stderr": 0.035915667978246635 },
+    "harness|ko_mmlu_philosophy|5": { "acc": 0.35691318327974275, "acc_stderr": 0.02721042037593403, "acc_norm": 0.35691318327974275, "acc_norm_stderr": 0.02721042037593403 },
+    "harness|ko_mmlu_human_aging|5": { "acc": 0.242152466367713, "acc_stderr": 0.028751392398694755, "acc_norm": 0.242152466367713, "acc_norm_stderr": 0.028751392398694755 },
+    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3435114503816794, "acc_stderr": 0.041649760719448786, "acc_norm": 0.3435114503816794, "acc_norm_stderr": 0.041649760719448786 },
+    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
+    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.29292929292929293, "acc_stderr": 0.03242497958178817, "acc_norm": 0.29292929292929293, "acc_norm_stderr": 0.03242497958178817 },
+    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3586206896551724, "acc_stderr": 0.039966295748767186, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.039966295748767186 },
+    "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237656, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237656 },
+    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.2773109243697479, "acc_stderr": 0.029079374539480007, "acc_norm": 0.2773109243697479, "acc_norm_stderr": 0.029079374539480007 },
+    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2564102564102564, "acc_stderr": 0.022139081103971527, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.022139081103971527 },
+    "harness|ko_mmlu_computer_security|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 },
+    "harness|ko_mmlu_global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 },
+    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.28703703703703703, "acc_stderr": 0.043733130409147614, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.043733130409147614 },
+    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.30049261083743845, "acc_stderr": 0.03225799476233484, "acc_norm": 0.30049261083743845, "acc_norm_stderr": 0.03225799476233484 },
+    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.31290322580645163, "acc_stderr": 0.026377567028645854, "acc_norm": 0.31290322580645163, "acc_norm_stderr": 0.026377567028645854 },
+    "harness|ko_mmlu_marketing|5": { "acc": 0.2948717948717949, "acc_stderr": 0.029872577708891148, "acc_norm": 0.2948717948717949, "acc_norm_stderr": 0.029872577708891148 },
+    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.24528301886792453, "acc_stderr": 0.026480357179895702, "acc_norm": 0.24528301886792453, "acc_norm_stderr": 0.026480357179895702 },
+    "harness|ko_mmlu_public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.040139645540727735, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727735 },
+    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.02773896963217609, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.02773896963217609 },
+    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 },
+    "harness|ko_mmlu_sociology|5": { "acc": 0.26865671641791045, "acc_stderr": 0.03134328358208954, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208954 },
+    "harness|ko_mmlu_college_medicine|5": { "acc": 0.28901734104046245, "acc_stderr": 0.03456425745086999, "acc_norm": 0.28901734104046245, "acc_norm_stderr": 0.03456425745086999 },
+    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.291005291005291, "acc_stderr": 0.02339382650048488, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.02339382650048488 },
+    "harness|ko_mmlu_college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.03586879280080342, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080342 },
+    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 },
+    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 },
+    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.31213872832369943, "acc_stderr": 0.024946792225272314, "acc_norm": 0.31213872832369943, "acc_norm_stderr": 0.024946792225272314 },
+    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3374233128834356, "acc_stderr": 0.03714908409935574, "acc_norm": 0.3374233128834356, "acc_norm_stderr": 0.03714908409935574 },
+    "harness|ko_mmlu_prehistory|5": { "acc": 0.2808641975308642, "acc_stderr": 0.025006469755799208, "acc_norm": 0.2808641975308642, "acc_norm_stderr": 0.025006469755799208 },
+    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 },
+    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.2694300518134715, "acc_stderr": 0.03201867122877793, "acc_norm": 0.2694300518134715, "acc_norm_stderr": 0.03201867122877793 },
+    "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336 },
+    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.28073394495412846, "acc_stderr": 0.019266055045871616, "acc_norm": 0.28073394495412846, "acc_norm_stderr": 0.019266055045871616 },
+    "harness|ko_mmlu_formal_logic|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03333333333333338, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03333333333333338 },
+    "harness|ko_mmlu_nutrition|5": { "acc": 0.2908496732026144, "acc_stderr": 0.026004800363952113, "acc_norm": 0.2908496732026144, "acc_norm_stderr": 0.026004800363952113 },
+    "harness|ko_mmlu_business_ethics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 },
+    "harness|ko_mmlu_international_law|5": { "acc": 0.4297520661157025, "acc_stderr": 0.04519082021319773, "acc_norm": 0.4297520661157025, "acc_norm_stderr": 0.04519082021319773 },
+    "harness|ko_mmlu_astronomy|5": { "acc": 0.40131578947368424, "acc_stderr": 0.039889037033362836, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.039889037033362836 },
+    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2875816993464052, "acc_stderr": 0.018311653053648222, "acc_norm": 0.2875816993464052, "acc_norm_stderr": 0.018311653053648222 },
+    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432397, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432397 },
+    "harness|ko_mmlu_machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.04327040932578732, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.04327040932578732 },
+    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3611111111111111, "acc_stderr": 0.03275773486100999, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.03275773486100999 },
+    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2446927374301676, "acc_stderr": 0.014378169884098426, "acc_norm": 0.2446927374301676, "acc_norm_stderr": 0.014378169884098426 },
+    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.27, "acc_stderr": 0.04461960433384739, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384739 },
+    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 },
+    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.1948529411764706, "acc_stderr": 0.02406059942348742, "acc_norm": 0.1948529411764706, "acc_norm_stderr": 0.02406059942348742 },
+    "harness|ko_mmlu_security_studies|5": { "acc": 0.24489795918367346, "acc_stderr": 0.027529637440174927, "acc_norm": 0.24489795918367346, "acc_norm_stderr": 0.027529637440174927 },
+    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 },
+    "harness|ko_mmlu_professional_law|5": { "acc": 0.2620599739243807, "acc_stderr": 0.011231552795890394, "acc_norm": 0.2620599739243807, "acc_norm_stderr": 0.011231552795890394 },
+    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.2696078431372549, "acc_stderr": 0.03114557065948678, "acc_norm": 0.2696078431372549, "acc_norm_stderr": 0.03114557065948678 },
+    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.24848484848484848, "acc_stderr": 0.03374402644139405, "acc_norm": 0.24848484848484848, "acc_norm_stderr": 0.03374402644139405 },
+    "harness|ko_truthfulqa_mc|0": { "mc1": 0.2423500611995104, "mc1_stderr": 0.015000674373570342, "mc2": 0.502230955672644, "mc2_stderr": 0.017048304732843935 },
+    "harness|ko_commongen_v2|2": { "acc": 0.10271546635182999, "acc_stderr": 0.010437532255238496, "acc_norm": 0.3695395513577332, "acc_norm_stderr": 0.01659488340568542 }
+  },
+  "versions": {
+    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
+    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
+    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
+    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
+    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
+    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
+    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
+    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
+    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
+    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
+    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
+    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
+    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
+    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
+    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
+    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
+    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
+    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
+    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
+    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
+    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
+  },
+  "config_general": {
+    "model_name": "4yo1/llama3-pre1-pre2-ds-ins2-lora3",
+    "model_sha": "e36e97d7503ad2c1d406edd928de720fa514d1ef",
+    "model_dtype": "torch.float16",
+    "lighteval_sha": "",
+    "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
+  }
+}
4yo1/llama3-pre1-pre2-ds-lora3-mergkit-base/result_2024-08-06 05:00:04.json
ADDED
@@ -0,0 +1,444 @@
+{
+  "results": {
+    "harness|ko_arc_challenge|25": { "acc": 0.2090443686006826, "acc_stderr": 0.011882746987406453, "acc_norm": 0.2636518771331058, "acc_norm_stderr": 0.012875929151297054 },
+    "harness|ko_hellaswag|10": { "acc": 0.2555267874925314, "acc_stderr": 0.004352655263682342, "acc_norm": 0.27126070503883687, "acc_norm_stderr": 0.004437016600956915 },
+    "harness|ko_mmlu_world_religions|5": { "acc": 0.42105263157894735, "acc_stderr": 0.03786720706234215, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.03786720706234215 },
+    "harness|ko_mmlu_management|5": { "acc": 0.32038834951456313, "acc_stderr": 0.046202840822800406, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.046202840822800406 },
+    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2937420178799489, "acc_stderr": 0.01628775938849167, "acc_norm": 0.2937420178799489, "acc_norm_stderr": 0.01628775938849167 },
+    "harness|ko_mmlu_anatomy|5": { "acc": 0.2814814814814815, "acc_stderr": 0.03885004245800254, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.03885004245800254 },
+    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 },
+    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102967, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102967 },
+    "harness|ko_mmlu_virology|5": { "acc": 0.26506024096385544, "acc_stderr": 0.03436024037944966, "acc_norm": 0.26506024096385544, "acc_norm_stderr": 0.03436024037944966 },
+    "harness|ko_mmlu_philosophy|5": { "acc": 0.33762057877813506, "acc_stderr": 0.026858825879488558, "acc_norm": 0.33762057877813506, "acc_norm_stderr": 0.026858825879488558 },
+    "harness|ko_mmlu_human_aging|5": { "acc": 0.273542600896861, "acc_stderr": 0.029918586707798834, "acc_norm": 0.273542600896861, "acc_norm_stderr": 0.029918586707798834 },
+    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.31297709923664124, "acc_stderr": 0.04066962905677697, "acc_norm": 0.31297709923664124, "acc_norm_stderr": 0.04066962905677697 },
+    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 },
+    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3383838383838384, "acc_stderr": 0.03371124142626302, "acc_norm": 0.3383838383838384, "acc_norm_stderr": 0.03371124142626302 },
+    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4206896551724138, "acc_stderr": 0.0411391498118926, "acc_norm": 0.4206896551724138, "acc_norm_stderr": 0.0411391498118926 },
+    "harness|ko_mmlu_college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.04576665403207763, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.04576665403207763 },
+    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3907563025210084, "acc_stderr": 0.031693802357129965, "acc_norm": 0.3907563025210084, "acc_norm_stderr": 0.031693802357129965 },
+    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3564102564102564, "acc_stderr": 0.0242831405294673, "acc_norm": 0.3564102564102564, "acc_norm_stderr": 0.0242831405294673 },
+    "harness|ko_mmlu_computer_security|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
+    "harness|ko_mmlu_global_facts|5": { "acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344 },
+    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2962962962962963, "acc_stderr": 0.04414343666854933, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.04414343666854933 },
+    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35467980295566504, "acc_stderr": 0.03366124489051449, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.03366124489051449 },
+    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.34516129032258064, "acc_stderr": 0.02704574657353432, "acc_norm": 0.34516129032258064, "acc_norm_stderr": 0.02704574657353432 },
+    "harness|ko_mmlu_marketing|5": { "acc": 0.28205128205128205, "acc_stderr": 0.029480360549541194, "acc_norm": 0.28205128205128205, "acc_norm_stderr": 0.029480360549541194 },
+    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3018867924528302, "acc_stderr": 0.028254200344438662, "acc_norm": 0.3018867924528302, "acc_norm_stderr": 0.028254200344438662 },
+    "harness|ko_mmlu_public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.04069306319721376, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.04069306319721376 },
+    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 },
+    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 },
+    "harness|ko_mmlu_sociology|5": { "acc": 0.32338308457711445, "acc_stderr": 0.03307615947979033, "acc_norm": 0.32338308457711445, "acc_norm_stderr": 0.03307615947979033 },
+    "harness|ko_mmlu_college_medicine|5": { "acc": 0.3930635838150289, "acc_stderr": 0.03724249595817731, "acc_norm": 0.3930635838150289, "acc_norm_stderr": 0.03724249595817731 },
+    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30952380952380953, "acc_stderr": 0.023809523809523864, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.023809523809523864 },
+    "harness|ko_mmlu_college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 },
+    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 },
+    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.31, "acc_stderr": 0.046482319871173156, "acc_norm": 0.31, "acc_norm_stderr": 0.046482319871173156 },
+    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.36127167630057805, "acc_stderr": 0.02586220185227789, "acc_norm": 0.36127167630057805, "acc_norm_stderr": 0.02586220185227789 },
+    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.26993865030674846, "acc_stderr": 0.0348782516849789, "acc_norm": 0.26993865030674846, "acc_norm_stderr": 0.0348782516849789 },
+    "harness|ko_mmlu_prehistory|5": { "acc": 0.26851851851851855, "acc_stderr": 0.02465968518596729, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.02465968518596729 },
+    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
+    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.27461139896373055, "acc_stderr": 0.03221024508041153, "acc_norm": 0.27461139896373055, "acc_norm_stderr": 0.03221024508041153 },
+    "harness|ko_mmlu_econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.04227054451232199, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.04227054451232199 },
+    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.30825688073394497, "acc_stderr": 0.019798366698367268, "acc_norm": 0.30825688073394497, "acc_norm_stderr": 0.019798366698367268 },
+    "harness|ko_mmlu_formal_logic|5": { "acc": 0.23015873015873015, "acc_stderr": 0.037649508797906045, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.037649508797906045 },
+    "harness|ko_mmlu_nutrition|5": { "acc": 0.369281045751634, "acc_stderr": 0.027634176689602663, "acc_norm": 0.369281045751634, "acc_norm_stderr": 0.027634176689602663 },
+    "harness|ko_mmlu_business_ethics|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 },
+    "harness|ko_mmlu_international_law|5": { "acc": 0.47107438016528924, "acc_stderr": 0.04556710331269498, "acc_norm": 0.47107438016528924, "acc_norm_stderr": 0.04556710331269498 },
+    "harness|ko_mmlu_astronomy|5": { "acc": 0.35526315789473684, "acc_stderr": 0.03894734487013316, "acc_norm": 0.35526315789473684, "acc_norm_stderr": 0.03894734487013316 },
+    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.25163398692810457, "acc_stderr": 0.017555818091322273, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.017555818091322273 },
+    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.25886524822695034, "acc_stderr": 0.026129572527180848, "acc_norm": 0.25886524822695034, "acc_norm_stderr": 0.026129572527180848 },
+    "harness|ko_mmlu_machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 },
+    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4351851851851852, "acc_stderr": 0.033812000056435254, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.033812000056435254 },
+    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249608, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249608 },
+    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 },
+    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 },
+    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.375, "acc_stderr": 0.029408372932278746, "acc_norm": 0.375, "acc_norm_stderr": 0.029408372932278746 },
+    "harness|ko_mmlu_security_studies|5": { "acc": 0.3142857142857143, "acc_stderr": 0.029719329422417475, "acc_norm": 0.3142857142857143, "acc_norm_stderr": 0.029719329422417475 },
+    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.3037974683544304, "acc_stderr": 0.029936696387138605, "acc_norm": 0.3037974683544304, "acc_norm_stderr": 0.029936696387138605 },
+    "harness|ko_mmlu_professional_law|5": { "acc": 0.2835723598435463, "acc_stderr": 0.011511900775968318, "acc_norm": 0.2835723598435463, "acc_norm_stderr": 0.011511900775968318 },
+    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.30392156862745096, "acc_stderr": 0.03228210387037893, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.03228210387037893 },
+    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 },
+    "harness|ko_truthfulqa_mc|0": { "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156482, "mc2": 0.5010694048297774, "mc2_stderr": 0.016769441079303827 },
+    "harness|ko_commongen_v2|2": { "acc": 0.14994096812278632, "acc_stderr": 0.012274378656217326, "acc_norm": 0.29634002361275086, "acc_norm_stderr": 0.015699701628594232 }
+  },
+  "versions": {
+    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
+    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
+    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
+    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
+    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
+    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
+    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
+    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
+    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
+    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
+    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
+    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
+    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
+    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
+    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
+    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
+    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
+    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
+    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
+    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
+    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
+  },
+  "config_general": {
+    "model_name": "4yo1/llama3-pre1-pre2-ds-lora3-mergkit-base",
+    "model_sha": "128792e6e9c8774b61fc763735a99f7faa6aad41",
+    "model_dtype": "torch.float16",
+    "lighteval_sha": "",
+    "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
+  }
+}
4yo1/llama3-pre1-pre2-ds-lora3/result_2024-07-26 21:17:55.json
ADDED
@@ -0,0 +1,444 @@
+{
+  "results": {
+    "harness|ko_arc_challenge|25": { "acc": 0.20392491467576793, "acc_stderr": 0.011774262478702256, "acc_norm": 0.25853242320819114, "acc_norm_stderr": 0.012794553754288666 },
+    "harness|ko_hellaswag|10": { "acc": 0.2531368253335989, "acc_stderr": 0.004339200363454499, "acc_norm": 0.253734315873332, "acc_norm_stderr": 0.004342580277662754 },
+    "harness|ko_mmlu_world_religions|5": { "acc": 0.38596491228070173, "acc_stderr": 0.03733756969066164, "acc_norm": 0.38596491228070173, "acc_norm_stderr": 0.03733756969066164 },
+    "harness|ko_mmlu_management|5": { "acc": 0.2815533980582524, "acc_stderr": 0.04453254836326468, "acc_norm": 0.2815533980582524, "acc_norm_stderr": 0.04453254836326468 },
+    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.30268199233716475, "acc_stderr": 0.016428781581749364, "acc_norm": 0.30268199233716475, "acc_norm_stderr": 0.016428781581749364 },
+    "harness|ko_mmlu_anatomy|5": { "acc": 0.37777777777777777, "acc_stderr": 0.04188307537595852, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.04188307537595852 },
+    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 },
+    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.24680851063829787, "acc_stderr": 0.02818544130123409, "acc_norm": 0.24680851063829787, "acc_norm_stderr": 0.02818544130123409 },
+    "harness|ko_mmlu_virology|5": { "acc": 0.3072289156626506, "acc_stderr": 0.035915667978246635, "acc_norm": 0.3072289156626506, "acc_norm_stderr": 0.035915667978246635 },
+    "harness|ko_mmlu_philosophy|5": { "acc": 0.3536977491961415, "acc_stderr": 0.027155208103200854, "acc_norm": 0.3536977491961415, "acc_norm_stderr": 0.027155208103200854 },
+    "harness|ko_mmlu_human_aging|5": { "acc": 0.2600896860986547, "acc_stderr": 0.02944249558585747, "acc_norm": 0.2600896860986547, "acc_norm_stderr": 0.02944249558585747 },
+    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.35877862595419846, "acc_stderr": 0.04206739313864908, "acc_norm": 0.35877862595419846, "acc_norm_stderr": 0.04206739313864908 },
+    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 },
+    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2878787878787879, "acc_stderr": 0.03225883512300993, "acc_norm": 0.2878787878787879, "acc_norm_stderr": 0.03225883512300993 },
+    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3586206896551724, "acc_stderr": 0.039966295748767186, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.039966295748767186 },
+    "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 },
+    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.28991596638655465, "acc_stderr": 0.029472485833136084, "acc_norm": 0.28991596638655465, "acc_norm_stderr": 0.029472485833136084 },
+    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.022421273612923703, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.022421273612923703 },
+    "harness|ko_mmlu_computer_security|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
+    "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 },
+    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3055555555555556, "acc_stderr": 0.044531975073749834, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.044531975073749834 },
+    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2955665024630542, "acc_stderr": 0.032104944337514575, "acc_norm": 0.2955665024630542, "acc_norm_stderr": 0.032104944337514575 },
+    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3, "acc_stderr": 0.026069362295335137, "acc_norm": 0.3, "acc_norm_stderr": 0.026069362295335137 },
+    "harness|ko_mmlu_marketing|5": { "acc": 0.2948717948717949, "acc_stderr": 0.02987257770889114, "acc_norm": 0.2948717948717949, "acc_norm_stderr": 0.02987257770889114 },
+    "harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
+
"acc": 0.26037735849056604,
|
149 |
+
"acc_stderr": 0.027008766090708076,
|
150 |
+
"acc_norm": 0.26037735849056604,
|
151 |
+
"acc_norm_stderr": 0.027008766090708076
|
152 |
+
},
|
153 |
+
"harness|ko_mmlu_public_relations|5": {
|
154 |
+
"acc": 0.21818181818181817,
|
155 |
+
"acc_stderr": 0.03955932861795833,
|
156 |
+
"acc_norm": 0.21818181818181817,
|
157 |
+
"acc_norm_stderr": 0.03955932861795833
|
158 |
+
},
|
159 |
+
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
+
"acc": 0.29259259259259257,
|
161 |
+
"acc_stderr": 0.02773896963217609,
|
162 |
+
"acc_norm": 0.29259259259259257,
|
163 |
+
"acc_norm_stderr": 0.02773896963217609
|
164 |
+
},
|
165 |
+
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
+
"acc": 0.304635761589404,
|
167 |
+
"acc_stderr": 0.03757949922943343,
|
168 |
+
"acc_norm": 0.304635761589404,
|
169 |
+
"acc_norm_stderr": 0.03757949922943343
|
170 |
+
},
|
171 |
+
"harness|ko_mmlu_sociology|5": {
|
172 |
+
"acc": 0.2736318407960199,
|
173 |
+
"acc_stderr": 0.03152439186555402,
|
174 |
+
"acc_norm": 0.2736318407960199,
|
175 |
+
"acc_norm_stderr": 0.03152439186555402
|
176 |
+
},
|
177 |
+
"harness|ko_mmlu_college_medicine|5": {
|
178 |
+
"acc": 0.2832369942196532,
|
179 |
+
"acc_stderr": 0.03435568056047875,
|
180 |
+
"acc_norm": 0.2832369942196532,
|
181 |
+
"acc_norm_stderr": 0.03435568056047875
|
182 |
+
},
|
183 |
+
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
+
"acc": 0.291005291005291,
|
185 |
+
"acc_stderr": 0.02339382650048488,
|
186 |
+
"acc_norm": 0.291005291005291,
|
187 |
+
"acc_norm_stderr": 0.02339382650048488
|
188 |
+
},
|
189 |
+
"harness|ko_mmlu_college_biology|5": {
|
190 |
+
"acc": 0.22916666666666666,
|
191 |
+
"acc_stderr": 0.035146974678623884,
|
192 |
+
"acc_norm": 0.22916666666666666,
|
193 |
+
"acc_norm_stderr": 0.035146974678623884
|
194 |
+
},
|
195 |
+
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
+
"acc": 0.21,
|
197 |
+
"acc_stderr": 0.040936018074033256,
|
198 |
+
"acc_norm": 0.21,
|
199 |
+
"acc_norm_stderr": 0.040936018074033256
|
200 |
+
},
|
201 |
+
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
+
"acc": 0.33,
|
203 |
+
"acc_stderr": 0.04725815626252604,
|
204 |
+
"acc_norm": 0.33,
|
205 |
+
"acc_norm_stderr": 0.04725815626252604
|
206 |
+
},
|
207 |
+
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
+
"acc": 0.33815028901734107,
|
209 |
+
"acc_stderr": 0.025469770149400175,
|
210 |
+
"acc_norm": 0.33815028901734107,
|
211 |
+
"acc_norm_stderr": 0.025469770149400175
|
212 |
+
},
|
213 |
+
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
+
"acc": 0.3128834355828221,
|
215 |
+
"acc_stderr": 0.03642914578292404,
|
216 |
+
"acc_norm": 0.3128834355828221,
|
217 |
+
"acc_norm_stderr": 0.03642914578292404
|
218 |
+
},
|
219 |
+
"harness|ko_mmlu_prehistory|5": {
|
220 |
+
"acc": 0.2839506172839506,
|
221 |
+
"acc_stderr": 0.02508947852376513,
|
222 |
+
"acc_norm": 0.2839506172839506,
|
223 |
+
"acc_norm_stderr": 0.02508947852376513
|
224 |
+
},
|
225 |
+
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
+
"acc": 0.24,
|
227 |
+
"acc_stderr": 0.042923469599092816,
|
228 |
+
"acc_norm": 0.24,
|
229 |
+
"acc_norm_stderr": 0.042923469599092816
|
230 |
+
},
|
231 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
+
"acc": 0.24352331606217617,
|
233 |
+
"acc_stderr": 0.030975436386845436,
|
234 |
+
"acc_norm": 0.24352331606217617,
|
235 |
+
"acc_norm_stderr": 0.030975436386845436
|
236 |
+
},
|
237 |
+
"harness|ko_mmlu_econometrics|5": {
|
238 |
+
"acc": 0.2543859649122807,
|
239 |
+
"acc_stderr": 0.04096985139843671,
|
240 |
+
"acc_norm": 0.2543859649122807,
|
241 |
+
"acc_norm_stderr": 0.04096985139843671
|
242 |
+
},
|
243 |
+
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
+
"acc": 0.29357798165137616,
|
245 |
+
"acc_stderr": 0.019525151122639663,
|
246 |
+
"acc_norm": 0.29357798165137616,
|
247 |
+
"acc_norm_stderr": 0.019525151122639663
|
248 |
+
},
|
249 |
+
"harness|ko_mmlu_formal_logic|5": {
|
250 |
+
"acc": 0.15873015873015872,
|
251 |
+
"acc_stderr": 0.03268454013011743,
|
252 |
+
"acc_norm": 0.15873015873015872,
|
253 |
+
"acc_norm_stderr": 0.03268454013011743
|
254 |
+
},
|
255 |
+
"harness|ko_mmlu_nutrition|5": {
|
256 |
+
"acc": 0.3006535947712418,
|
257 |
+
"acc_stderr": 0.026256053835718964,
|
258 |
+
"acc_norm": 0.3006535947712418,
|
259 |
+
"acc_norm_stderr": 0.026256053835718964
|
260 |
+
},
|
261 |
+
"harness|ko_mmlu_business_ethics|5": {
|
262 |
+
"acc": 0.24,
|
263 |
+
"acc_stderr": 0.04292346959909281,
|
264 |
+
"acc_norm": 0.24,
|
265 |
+
"acc_norm_stderr": 0.04292346959909281
|
266 |
+
},
|
267 |
+
"harness|ko_mmlu_international_law|5": {
|
268 |
+
"acc": 0.4214876033057851,
|
269 |
+
"acc_stderr": 0.045077322787750944,
|
270 |
+
"acc_norm": 0.4214876033057851,
|
271 |
+
"acc_norm_stderr": 0.045077322787750944
|
272 |
+
},
|
273 |
+
"harness|ko_mmlu_astronomy|5": {
|
274 |
+
"acc": 0.40131578947368424,
|
275 |
+
"acc_stderr": 0.039889037033362836,
|
276 |
+
"acc_norm": 0.40131578947368424,
|
277 |
+
"acc_norm_stderr": 0.039889037033362836
|
278 |
+
},
|
279 |
+
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
+
"acc": 0.2875816993464052,
|
281 |
+
"acc_stderr": 0.018311653053648222,
|
282 |
+
"acc_norm": 0.2875816993464052,
|
283 |
+
"acc_norm_stderr": 0.018311653053648222
|
284 |
+
},
|
285 |
+
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
+
"acc": 0.24113475177304963,
|
287 |
+
"acc_stderr": 0.025518731049537786,
|
288 |
+
"acc_norm": 0.24113475177304963,
|
289 |
+
"acc_norm_stderr": 0.025518731049537786
|
290 |
+
},
|
291 |
+
"harness|ko_mmlu_machine_learning|5": {
|
292 |
+
"acc": 0.30357142857142855,
|
293 |
+
"acc_stderr": 0.04364226155841044,
|
294 |
+
"acc_norm": 0.30357142857142855,
|
295 |
+
"acc_norm_stderr": 0.04364226155841044
|
296 |
+
},
|
297 |
+
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
+
"acc": 0.3425925925925926,
|
299 |
+
"acc_stderr": 0.03236585252602158,
|
300 |
+
"acc_norm": 0.3425925925925926,
|
301 |
+
"acc_norm_stderr": 0.03236585252602158
|
302 |
+
},
|
303 |
+
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
+
"acc": 0.2670391061452514,
|
305 |
+
"acc_stderr": 0.01479650262256255,
|
306 |
+
"acc_norm": 0.2670391061452514,
|
307 |
+
"acc_norm_stderr": 0.01479650262256255
|
308 |
+
},
|
309 |
+
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
+
"acc": 0.25,
|
311 |
+
"acc_stderr": 0.04351941398892446,
|
312 |
+
"acc_norm": 0.25,
|
313 |
+
"acc_norm_stderr": 0.04351941398892446
|
314 |
+
},
|
315 |
+
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
+
"acc": 0.34,
|
317 |
+
"acc_stderr": 0.04760952285695236,
|
318 |
+
"acc_norm": 0.34,
|
319 |
+
"acc_norm_stderr": 0.04760952285695236
|
320 |
+
},
|
321 |
+
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
+
"acc": 0.19117647058823528,
|
323 |
+
"acc_stderr": 0.023886881922440355,
|
324 |
+
"acc_norm": 0.19117647058823528,
|
325 |
+
"acc_norm_stderr": 0.023886881922440355
|
326 |
+
},
|
327 |
+
"harness|ko_mmlu_security_studies|5": {
|
328 |
+
"acc": 0.24897959183673468,
|
329 |
+
"acc_stderr": 0.027682979522960224,
|
330 |
+
"acc_norm": 0.24897959183673468,
|
331 |
+
"acc_norm_stderr": 0.027682979522960224
|
332 |
+
},
|
333 |
+
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
+
"acc": 0.27848101265822783,
|
335 |
+
"acc_stderr": 0.029178682304842555,
|
336 |
+
"acc_norm": 0.27848101265822783,
|
337 |
+
"acc_norm_stderr": 0.029178682304842555
|
338 |
+
},
|
339 |
+
"harness|ko_mmlu_professional_law|5": {
|
340 |
+
"acc": 0.2627118644067797,
|
341 |
+
"acc_stderr": 0.011240545514995664,
|
342 |
+
"acc_norm": 0.2627118644067797,
|
343 |
+
"acc_norm_stderr": 0.011240545514995664
|
344 |
+
},
|
345 |
+
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
+
"acc": 0.25,
|
347 |
+
"acc_stderr": 0.03039153369274154,
|
348 |
+
"acc_norm": 0.25,
|
349 |
+
"acc_norm_stderr": 0.03039153369274154
|
350 |
+
},
|
351 |
+
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
+
"acc": 0.23636363636363636,
|
353 |
+
"acc_stderr": 0.03317505930009179,
|
354 |
+
"acc_norm": 0.23636363636363636,
|
355 |
+
"acc_norm_stderr": 0.03317505930009179
|
356 |
+
},
|
357 |
+
"harness|ko_truthfulqa_mc|0": {
|
358 |
+
"mc1": 0.24969400244798043,
|
359 |
+
"mc1_stderr": 0.015152286907148125,
|
360 |
+
"mc2": 0.506116595507612,
|
361 |
+
"mc2_stderr": 0.017054384753311957
|
362 |
+
},
|
363 |
+
"harness|ko_commongen_v2|2": {
|
364 |
+
"acc": 0.09917355371900827,
|
365 |
+
"acc_stderr": 0.010276218268084948,
|
366 |
+
"acc_norm": 0.3659976387249115,
|
367 |
+
"acc_norm_stderr": 0.01656148966489569
|
368 |
+
}
|
369 |
+
},
|
370 |
+
"versions": {
|
371 |
+
"all": 0,
|
372 |
+
"harness|ko_arc_challenge|25": 0,
|
373 |
+
"harness|ko_hellaswag|10": 0,
|
374 |
+
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
+
"harness|ko_mmlu_management|5": 1,
|
376 |
+
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
+
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
+
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
+
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
+
"harness|ko_mmlu_virology|5": 1,
|
381 |
+
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
+
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
+
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
+
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
+
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
+
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
+
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
+
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
+
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
+
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
+
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
+
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
+
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
+
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
+
"harness|ko_mmlu_marketing|5": 1,
|
396 |
+
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
+
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
+
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
+
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
+
"harness|ko_mmlu_sociology|5": 1,
|
401 |
+
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
+
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
+
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
+
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
+
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
+
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
+
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
+
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
+
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
+
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
+
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
+
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
+
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
+
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
+
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
+
"harness|ko_mmlu_international_law|5": 1,
|
417 |
+
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
+
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
+
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
+
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
+
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
+
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
+
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
+
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
+
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
+
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
+
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
+
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
+
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
+
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
+
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
+
"harness|ko_commongen_v2|2": 1
|
433 |
+
},
|
434 |
+
"config_general": {
|
435 |
+
"model_name": "4yo1/llama3-pre1-pre2-ds-lora3",
|
436 |
+
"model_sha": "1a0c007ab818dd0b388e73fe894f1b3a0ebe592d",
|
437 |
+
"model_dtype": "torch.float16",
|
438 |
+
"lighteval_sha": "",
|
439 |
+
"num_few_shot_default": 0,
|
440 |
+
"num_fewshot_seeds": 1,
|
441 |
+
"override_batch_size": 1,
|
442 |
+
"max_samples": null
|
443 |
+
}
|
444 |
+
}
|
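
For reference, a minimal sketch of how a backup result file like the one added above could be consumed downstream, assuming a local copy saved as "result_backup.json" (the file path and the macro-averaging choice are illustrative assumptions, not part of this commit):

import json

# Load one backup result file; the path is a hypothetical local copy.
with open("result_backup.json") as f:
    data = json.load(f)

# Macro-average "acc" over the ko_mmlu subtasks recorded under "results".
mmlu_accs = [
    metrics["acc"]
    for task, metrics in data["results"].items()
    if task.startswith("harness|ko_mmlu_")
]
print(f"{data['config_general']['model_name']}: "
      f"ko_mmlu macro-avg acc = {sum(mmlu_accs) / len(mmlu_accs):.4f}")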