{
  "results": {
    "hellaswag_nl": {
      "acc": 0.44079870480302213,
      "acc_stderr": 0.005158280633507224,
      "acc_norm": 0.5840259039395574,
      "acc_norm_stderr": 0.005120942804814836
    }
  },
  "versions": {
    "hellaswag_nl": 1
  },
  "config": {
    "model": "hf-auto",
    "model_args": "pretrained=mistralai/Mistral-7B-v0.1,use_accelerate=True,device_map_option=auto,dtype=bfloat16,load_in_8bit=True",
    "batch_size": "auto",
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
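For reference, a run producing a `config` block like the one above can typically be reproduced programmatically with EleutherAI's lm-evaluation-harness. The exact entry point and the registration of the `hellaswag_nl` task depend on the harness version or fork being used, so the following Python sketch is an assumption based on the older `evaluator.simple_evaluate` API rather than a verified invocation.

```python
# Hedged sketch: assumes a lm-evaluation-harness version (or fork) that exposes
# evaluator.simple_evaluate, accepts the "hf-auto" model type, and registers the
# "hellaswag_nl" task. Adjust names to match the harness you actually have installed.
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-auto",
    model_args=(
        "pretrained=mistralai/Mistral-7B-v0.1,"
        "use_accelerate=True,device_map_option=auto,"
        "dtype=bfloat16,load_in_8bit=True"
    ),
    tasks=["hellaswag_nl"],
    batch_size="auto",
    device="cuda",
    no_cache=False,
    limit=None,
    bootstrap_iters=100000,
)

# The returned dict mirrors the JSON above ("results", "versions", "config").
print(json.dumps(results["results"], indent=2))
```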