mt5-small-esquad-qg/eval_pipeline/metric.first.answer.paragraph.questions_answers.lmqg_qg_esquad.default.lmqg_mt5-small-esquad-ae.json
{"test": {"QAAlignedF1Score (BERTScore)": 0.7988921058340037, "QAAlignedRecall (BERTScore)": 0.8255948211525714, "QAAlignedPrecision (BERTScore)": 0.7745897872936881, "QAAlignedF1Score (MoverScore)": 0.5482284742612624, "QAAlignedRecall (MoverScore)": 0.5651981700650607, "QAAlignedPrecision (MoverScore)": 0.5331177725424141, "Bleu_1": 0.11100784309408389, "Bleu_2": 0.05497594877807382, "Bleu_3": 0.029998396142505098, "Bleu_4": 0.018255018827614197, "METEOR": 0.2221766581415698, "ROUGE_L": 0.15464866686144543, "BERTScore": 0.6978340706678742, "MoverScore": 0.5180443982663746}, "validation": {"QAAlignedF1Score (BERTScore)": 0.8155117270742672, "QAAlignedRecall (BERTScore)": 0.8179919089261933, "QAAlignedPrecision (BERTScore)": 0.8135820783610764, "QAAlignedF1Score (MoverScore)": 0.560619425606755, "QAAlignedRecall (MoverScore)": 0.5590370203449281, "QAAlignedPrecision (MoverScore)": 0.5626656790271288, "Bleu_1": 0.2877921197415349, "Bleu_2": 0.1749147324443688, "Bleu_3": 0.10364523791871796, "Bleu_4": 0.06518419550728718, "METEOR": 0.26654364650131734, "ROUGE_L": 0.2639580244726278, "BERTScore": 0.7798699025491844, "MoverScore": 0.5514346383053663}}