mt5-base-esquad-qg / eval_pipeline / metric.first.answer.paragraph.questions_answers.lmqg_qg_esquad.default.lmqg_mt5-base-esquad-ae.json
{"test": {"QAAlignedF1Score (BERTScore)": 0.8078713605450362, "QAAlignedRecall (BERTScore)": 0.833449071514955, "QAAlignedPrecision (BERTScore)": 0.7845042023297946, "QAAlignedF1Score (MoverScore)": 0.5524791466133201, "QAAlignedRecall (MoverScore)": 0.5698607600911927, "QAAlignedPrecision (MoverScore)": 0.5370291078038771, "Bleu_1": 0.11750007487216974, "Bleu_2": 0.05740989346184767, "Bleu_3": 0.030421977374148566, "Bleu_4": 0.017927069525796223, "METEOR": 0.22642532785945516, "ROUGE_L": 0.1603169325957768, "BERTScore": 0.705877662638828, "MoverScore": 0.5199675513991766}, "validation": {"QAAlignedF1Score (BERTScore)": 0.8215792579317507, "QAAlignedRecall (BERTScore)": 0.8231454011023817, "QAAlignedPrecision (BERTScore)": 0.820431418504882, "QAAlignedF1Score (MoverScore)": 0.563937127823911, "QAAlignedRecall (MoverScore)": 0.5626022130349787, "QAAlignedPrecision (MoverScore)": 0.565636640036235, "Bleu_1": 0.298730148865766, "Bleu_2": 0.1810636845934705, "Bleu_3": 0.10711163929332837, "Bleu_4": 0.06757363487116197, "METEOR": 0.26747895860633, "ROUGE_L": 0.2680495512959466, "BERTScore": 0.7880353709898194, "MoverScore": 0.5539419946142672}}