{
"model": {
"dtype": "auto",
"max_length": "1024",
"model": "boun-tabi-LMG/TURNA",
"api": "hf",
"architecture": "T5ForCondtiionalGeneration",
"type": "pretrained",
"num_parameters": "1b"
},
"results": [
{
"name": "xquad_tr",
"task": "extractive_question_answering",
"exact_match": 0.0,
"f1": 0.0
},
{
"name": "xlsum_tr",
"task": "summarization",
"rouge1": 0.1904384366601188,
"rouge2": 0.060686113611140166,
"rougeL": 0.1311090280660866
},
{
"name": "xcopa_tr",
"task": "multiple_choice",
"acc": 0.558,
"acc_norm": 0.558
},
{
"name": "wmt-tr-en-prompt",
"task": "machine_translation",
"wer": 3.9036796738046218,
"bleu": 0.0008286617236874524
},
{
"name": "wiki_lingua_tr",
"task": "summarization",
"rouge1": 0.18435291474691423,
"rouge2": 0.05584649726914134,
"rougeL": 0.13446021077350823
},
{
"name": "turkish_plu",
"task": "multiple_choice",
"acc": 0.40288,
"acc_norm": 0.44608
},
{
"name": "turkish_plu_goal_inference",
"task": "multiple_choice",
"acc": 0.37992831541218636,
"acc_norm": 0.35722819593787336
},
{
"name": "turkish_plu_next_event_prediction",
"task": "multiple_choice",
"acc": 0.383206106870229,
"acc_norm": 0.4488549618320611
},
{
"name": "turkish_plu_step_inference",
"task": "multiple_choice",
"acc": 0.272875816993464,
"acc_norm": 0.4542483660130719
},
{
"name": "turkish_plu_step_ordering",
"task": "multiple_choice",
"acc": 0.5122428991185113,
"acc_norm": 0.5122428991185113
},
{
"name": "check_worthiness",
"task": "multiple_choice",
"acc": 0.42230347349177333,
"acc_norm": 0.620201096892139
},
{
"name": "relevance_judgment",
"task": "multiple_choice",
"acc": 0.4904021937842779,
"acc_norm": 0.5781535648994516
},
{
"name": "tr-wikihow-summ",
"task": "summarization",
"rouge1": 0.20515501424269858,
"rouge2": 0.05693981251975118,
"rougeL": 0.1449313333992171
},
{
"name": "tquad",
"task": "extractive_question_answering",
"exact_match": 0.0,
"f1": 0.0003736920777279522
},
{
"name": "sts_tr",
"task": "text_classification",
"acc": 0.14213197969543148,
"acc_norm": 0.19506889050036258
},
{
"name": "offenseval_tr",
"task": "text_classification",
"acc": 0.5099206349206349,
"acc_norm": 0.7970521541950113
},
{
"name": "mnli_tr",
"task": "natural_language_inference",
"acc": 0.3203,
"acc_norm": 0.3159
},
{
"name": "snli_tr",
"task": "natural_language_inference",
"acc": 0.3223,
"acc_norm": 0.3278
},
{
"name": "xnli_tr",
"task": "natural_language_inference",
"acc": 0.32974051896207585,
"acc_norm": 0.3277445109780439
},
{
"name": "news_cat",
"task": "text_classification",
"acc": 0.328,
"acc_norm": 0.208
},
{
"name": "mlsum_tr",
"task": "summarization",
"rouge1": 0.20830277213555015,
"rouge2": 0.11040542892341527,
"rougeL": 0.16135585618616377
},
{
"name": "mkqa_tr",
"task": "extractive_question_answering",
"exact_match": 0.0011837821840781297,
"f1": 0.006720430107526878
},
{
"name": "ironytr",
"task": "text_classification",
"acc": 0.48333333333333334,
"acc_norm": 0.5033333333333333
},
{
"name": "gecturk_generation",
"task": "grammatical_error_correction",
"exact_match": 0.0
},
{
"name": "exams_tr",
"task": "multiple_choice",
"acc": 0.2366412213740458,
"acc_norm": 0.2748091603053435
},
{
"name": "belebele_tr",
"task": "multiple_choice",
"acc": 0.22555555555555556,
"acc_norm": 0.22555555555555556
}
]
}