{
"model": {
"dtype": "bfloat16",
"max_length": "4096",
"model": "mistralai/Mistral-7B-v0.1",
"api": "hf",
"architecture": "MixtralForCausalLM",
"type": "pretrained",
"num_parameters": "7b"
},
"results": [
{
"name": "xquad_tr",
"task": "extractive_question_answering",
"exact_match": 0.16722689075630254,
"f1": 0.32150094374615246
},
{
"name": "xcopa_tr",
"task": "multiple_choice",
"acc": 0.566,
"acc_norm": 0.566
},
{
"name": "turkish_plu",
"task": "multiple_choice",
"acc": 0.45152,
"acc_norm": 0.5136
},
{
"name": "turkish_plu_goal_inference",
"task": "multiple_choice",
"acc": 0.42771804062126645,
"acc_norm": 0.46714456391875747
},
{
"name": "turkish_plu_next_event_prediction",
"task": "multiple_choice",
"acc": 0.39541984732824426,
"acc_norm": 0.5022900763358779
},
{
"name": "turkish_plu_step_inference",
"task": "multiple_choice",
"acc": 0.29248366013071897,
"acc_norm": 0.4411764705882353
},
{
"name": "turkish_plu_step_ordering",
"task": "multiple_choice",
"acc": 0.6023506366307542,
"acc_norm": 0.6023506366307542
},
{
"name": "check_worthiness",
"task": "multiple_choice",
"acc": 0.37614259597806216,
"acc_norm": 0.42458866544789764
},
{
"name": "relevance_judgment",
"task": "multiple_choice",
"acc": 0.4218464351005484,
"acc_norm": 0.49588665447897623
},
{
"name": "tquad",
"task": "extractive_question_answering",
"exact_match": 0.2096412556053812,
"f1": 0.4767364701184728
},
{
"name": "sts_tr",
"task": "text_classification",
"acc": 0.135605511240029,
"acc_norm": 0.20522117476432197
},
{
"name": "offenseval_tr",
"task": "text_classification",
"acc": 0.2046485260770975,
"acc_norm": 0.3735827664399093
},
{
"name": "mnli_tr",
"task": "natural_language_inference",
"acc": 0.3194,
"acc_norm": 0.3267
},
{
"name": "snli_tr",
"task": "natural_language_inference",
"acc": 0.3196,
"acc_norm": 0.3201
},
{
"name": "xnli_tr",
"task": "natural_language_inference",
"acc": 0.331936127744511,
"acc_norm": 0.34910179640718564
},
{
"name": "news_cat",
"task": "text_classification",
"acc": 0.652,
"acc_norm": 0.44
},
{
"name": "mkqa_tr",
"task": "extractive_question_answering",
"exact_match": 0.12030186445693992,
"f1": 0.16163416207615164
},
{
"name": "ironytr",
"task": "text_classification",
"acc": 0.5016666666666667,
"acc_norm": 0.52
},
{
"name": "exams_tr",
"task": "multiple_choice",
"acc": 0.24173027989821882,
"acc_norm": 0.30279898218829515
},
{
"name": "belebele_tr",
"task": "multiple_choice",
"acc": 0.37444444444444447,
"acc_norm": 0.37444444444444447
},
{
"name": "gecturk_generation",
"task": "grammatical_error_correction",
"exact_match": 0.20660599932591844
},
{
"name": "mlsum_tr",
"task": "summarization",
"rouge1": 0.09403885616158554,
"rouge2": 0.06300721907752257,
"rougeL": 0.08169726458665999
},
{
"name": "wiki_lingua_tr",
"task": "summarization",
"rouge1": 0.1905392717787084,
"rouge2": 0.05957088325130176,
"rougeL": 0.1472985242082243
},
{
"name": "wmt-tr-en-prompt",
"task": "machine_translation",
"wer": 1.0876062644712858,
"bleu": 0.04973628734419603
},
{
"name": "xlsum_tr",
"task": "summarization",
"rouge1": 0.02720399421152351,
"rouge2": 0.012032606076011431,
"rougeL": 0.02311080687545987
}
]
}