Bram Vanroy committed on
Commit: d64c180
Parent(s): 4a78e88

add evaluations

evals/mmlu/mmlu_nl_Llama-2-13b-chat-dutch.json ADDED
@@ -0,0 +1,23 @@
+{
+  "results": {
+    "mmlu_nl": {
+      "acc": 0.4120057676254079,
+      "acc_stderr": 0.00428791717360262,
+      "acc_norm": 0.35387417469833804,
+      "acc_norm_stderr": 0.0041657307070805455
+    }
+  },
+  "versions": {
+    "mmlu_nl": 0
+  },
+  "config": {
+    "model": "hf-auto",
+    "model_args": "pretrained=BramVanroy/Llama-2-13b-chat-dutch,use_accelerate=True,device_map_option=auto,dtype=bfloat16,load_in_8bit=True",
+    "batch_size": 1,
+    "device": "cuda",
+    "no_cache": false,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "description_dict": {}
+  }
+}
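
Each of these files follows what looks like the lm-evaluation-harness output layout: per-task metrics under "results", the task version under "versions", and the run settings under "config". As a minimal sketch (not part of this commit), the file added above can be read with the standard library only; the path is the one introduced here:

import json
from pathlib import Path

# Path as added in this commit.
path = Path("evals/mmlu/mmlu_nl_Llama-2-13b-chat-dutch.json")

with path.open(encoding="utf-8") as fh:
    data = json.load(fh)

metrics = data["results"]["mmlu_nl"]
# Report accuracy with its bootstrapped standard error
# (the number of bootstrap iterations is recorded in "config").
print(f"acc      = {metrics['acc']:.4f} +/- {metrics['acc_stderr']:.4f}")
print(f"acc_norm = {metrics['acc_norm']:.4f} +/- {metrics['acc_norm_stderr']:.4f}")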
evals/mmlu/mmlu_nl_Llama-2-13b-chat-hf.json ADDED
@@ -0,0 +1,23 @@
+{
+  "results": {
+    "mmlu_nl": {
+      "acc": 0.4193670790012901,
+      "acc_stderr": 0.004298888710440378,
+      "acc_norm": 0.37398497381801626,
+      "acc_norm_stderr": 0.004215291981440015
+    }
+  },
+  "versions": {
+    "mmlu_nl": 0
+  },
+  "config": {
+    "model": "hf-auto",
+    "model_args": "pretrained=meta-llama/Llama-2-13b-chat-hf,use_accelerate=True,device_map_option=auto,dtype=bfloat16,load_in_8bit=True",
+    "batch_size": 1,
+    "device": "cuda",
+    "no_cache": false,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "description_dict": {}
+  }
+}
evals/mmlu/mmlu_nl_Llama-2-13b-hf.json ADDED
@@ -0,0 +1,23 @@
+{
+  "results": {
+    "mmlu_nl": {
+      "acc": 0.416938605145329,
+      "acc_stderr": 0.004295378164911688,
+      "acc_norm": 0.35637853836229794,
+      "acc_norm_stderr": 0.0041723356313098465
+    }
+  },
+  "versions": {
+    "mmlu_nl": 0
+  },
+  "config": {
+    "model": "hf-auto",
+    "model_args": "pretrained=meta-llama/Llama-2-13b-hf,use_accelerate=True,device_map_option=auto,dtype=bfloat16,load_in_8bit=True",
+    "batch_size": 1,
+    "device": "cuda",
+    "no_cache": false,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "description_dict": {}
+  }
+}
evals/mmlu/mmlu_nl_llama2-13b-ft-mc4_nl_cleaned_tiny.json ADDED
@@ -0,0 +1,23 @@
+{
+  "results": {
+    "mmlu_nl": {
+      "acc": 0.4082871670334674,
+      "acc_stderr": 0.004281999043287171,
+      "acc_norm": 0.35190103969036957,
+      "acc_norm_stderr": 0.004160438860806826
+    }
+  },
+  "versions": {
+    "mmlu_nl": 0
+  },
+  "config": {
+    "model": "hf-auto",
+    "model_args": "pretrained=BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny,use_accelerate=True,device_map_option=auto,dtype=bfloat16,load_in_8bit=True",
+    "batch_size": 1,
+    "device": "cuda",
+    "no_cache": false,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "description_dict": {}
+  }
+}
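
Taken together, the four files allow a side-by-side comparison of MMLU-NL accuracy for the Dutch-tuned checkpoints and the original Llama 2 13B models. A small aggregation sketch, assuming it is run from the repository root so that the evals/mmlu/ directory added in this commit is on the relative path:

import json
from pathlib import Path

rows = []
for path in sorted(Path("evals/mmlu").glob("mmlu_nl_*.json")):
    data = json.loads(path.read_text(encoding="utf-8"))
    # The checkpoint name is embedded in config.model_args as "pretrained=...".
    args = dict(kv.split("=", 1) for kv in data["config"]["model_args"].split(","))
    scores = data["results"]["mmlu_nl"]
    rows.append((args["pretrained"], scores["acc"], scores["acc_norm"]))

# Print a simple table, best raw accuracy first.
print(f"{'model':<55} {'acc':>8} {'acc_norm':>9}")
for model, acc, acc_norm in sorted(rows, key=lambda r: r[1], reverse=True):
    print(f"{model:<55} {acc:>8.4f} {acc_norm:>9.4f}")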