mt5-small-esquad-qg-ae / eval /metric.first.answer.paragraph_sentence.answer.lmqg_qg_esquad.default.json
{"validation": {"Bleu_1": 0.32878923204687743, "Bleu_2": 0.28176677127174277, "Bleu_3": 0.24550547413428273, "Bleu_4": 0.21461602937807223, "METEOR": 0.4191651843897522, "ROUGE_L": 0.4748575567931135, "BERTScore": 0.8869395885838578, "MoverScore": 0.7893686043679116, "AnswerF1Score": 72.73817277640266, "AnswerExactMatch": 53.964049195837276}, "test": {"Bleu_1": 0.35181080288163197, "Bleu_2": 0.3048388285027219, "Bleu_3": 0.26918240180498476, "Bleu_4": 0.23890126697246036, "METEOR": 0.4310819495505446, "ROUGE_L": 0.4857633881856472, "BERTScore": 0.8976587676674889, "MoverScore": 0.8064251215882593, "AnswerF1Score": 75.31192537985491, "AnswerExactMatch": 57.63481551561022}}