{
"model": {
"dtype": "auto",
"max_length": "1024",
"model": "boun-tabi-LMG/TURNA",
"api": "hf",
"architecture": "T5ForCondtiionalGeneration",
"type": "pretrained",
"num_parameters": "1b"
},
"results": [
{
"name": "xquad_tr",
"task": "extractive_question_answering",
"exact_match": 0.0,
"f1": 0.0
},
{
"name": "xlsum_tr",
"task": "summarization",
"rouge1": 0.1904384366601188,
"rouge2": 0.060686113611140166,
"rougeL": 0.1311090280660866
},
{
"name": "xcopa_tr",
"task": "multiple_choice",
"acc": 0.558,
"acc_norm": 0.558
},
{
"name": "wmt-tr-en-prompt",
"task": "machine_translation",
"wer": 3.9036796738046218,
"bleu": 0.0008286617236874524
},
{
"name": "wiki_lingua_tr",
"task": "summarization",
"rouge1": 0.18435291474691423,
"rouge2": 0.05584649726914134,
"rougeL": 0.13446021077350823
},
{
"name": "turkish_plu",
"task": "multiple_choice",
"acc": 0.40288,
"acc_norm": 0.44608
},
{
"name": "turkish_plu_goal_inference",
"task": "multiple_choice",
"acc": 0.37992831541218636,
"acc_norm": 0.35722819593787336
},
{
"name": "turkish_plu_next_event_prediction",
"task": "multiple_choice",
"acc": 0.383206106870229,
"acc_norm": 0.4488549618320611
},
{
"name": "turkish_plu_step_inference",
"task": "multiple_choice",
"acc": 0.272875816993464,
"acc_norm": 0.4542483660130719
},
{
"name": "turkish_plu_step_ordering",
"task": "multiple_choice",
"acc": 0.5122428991185113,
"acc_norm": 0.5122428991185113
},
{
"name": "check_worthiness",
"task": "multiple_choice",
"acc": 0.42230347349177333,
"acc_norm": 0.620201096892139
},
{
"name": "relevance_judgment",
"task": "multiple_choice",
"acc": 0.4904021937842779,
"acc_norm": 0.5781535648994516
},
{
"name": "tr-wikihow-summ",
"task": "summarization",
"rouge1": 0.20515501424269858,
"rouge2": 0.05693981251975118,
"rougeL": 0.1449313333992171
},
{
"name": "tquad",
"task": "extractive_question_answering",
"exact_match": 0.0,
"f1": 0.0003736920777279522
},
{
"name": "sts_tr",
"task": "text_classification",
"acc": 0.14213197969543148,
"acc_norm": 0.19506889050036258
},
{
"name": "offenseval_tr",
"task": "text_classification",
"acc": 0.5099206349206349,
"acc_norm": 0.7970521541950113
},
{
"name": "mnli_tr",
"task": "natural_language_inference",
"acc": 0.3203,
"acc_norm": 0.3159
},
{
"name": "snli_tr",
"task": "natural_language_inference",
"acc": 0.3223,
"acc_norm": 0.3278
},
{
"name": "xnli_tr",
"task": "natural_language_inference",
"acc": 0.32974051896207585,
"acc_norm": 0.3277445109780439
},
{
"name": "news_cat",
"task": "text_classification",
"acc": 0.328,
"acc_norm": 0.208
},
{
"name": "mlsum_tr",
"task": "summarization",
"rouge1": 0.20830277213555015,
"rouge2": 0.11040542892341527,
"rougeL": 0.16135585618616377
},
{
"name": "mkqa_tr",
"task": "extractive_question_answering",
"exact_match": 0.0011837821840781297,
"f1": 0.006720430107526878
},
{
"name": "ironytr",
"task": "text_classification",
"acc": 0.48333333333333334,
"acc_norm": 0.5033333333333333
},
{
"name": "gecturk_generation",
"task": "grammatical_error_correction",
"exact_match": 0.0
},
{
"name": "exams_tr",
"task": "multiple_choice",
"acc": 0.2366412213740458,
"acc_norm": 0.2748091603053435
},
{
"name": "belebele_tr",
"task": "multiple_choice",
"acc": 0.22555555555555556,
"acc_norm": 0.22555555555555556
}
]
}