{
  "results": {
    "jcommonsenseqa-1.1-0.3": {
      "acc": 0.7578194816800715,
      "acc_stderr": 0.012812432289317893,
      "acc_norm": 0.4280607685433423,
      "acc_norm_stderr": 0.014798127177394432
    }
  },
  "versions": {
    "jcommonsenseqa-1.1-0.3": 1.1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=meta-llama/Llama-2-13b-hf,peft=HachiML/Llama-2-13b-hf-qlora-dolly-ja-2ep",
    "num_fewshot": 3,
    "batch_size": 1,
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
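
The "config" block above mirrors the argument list of lm-evaluation-harness's evaluator.simple_evaluate, which suggests the file was dumped from a run along these lines. The following is a minimal reproduction sketch, assuming a fork of EleutherAI's lm-evaluation-harness that provides the Japanese jcommonsenseqa-1.1-0.3 task; the output filename is hypothetical, and all other values are taken directly from the JSON.

import json

from lm_eval import evaluator

# Evaluate the QLoRA adapter on top of the Llama-2-13b base model,
# 3-shot, matching the "config" section of the results file.
results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args=(
        "pretrained=meta-llama/Llama-2-13b-hf,"
        "peft=HachiML/Llama-2-13b-hf-qlora-dolly-ja-2ep"
    ),
    tasks=["jcommonsenseqa-1.1-0.3"],
    num_fewshot=3,
    batch_size=1,
    device="cuda",
)

# simple_evaluate returns a dict with "results", "versions", and "config",
# i.e. the same shape as the file above.
with open("result.jcommonsenseqa.json", "w") as f:  # hypothetical filename
    json.dump(results, f, indent=2, ensure_ascii=False)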