mt5-small-esquad-qg / eval/metric.first.answer.paragraph.questions_answers.lmqg_qg_esquad.default.json
{"test": {"QAAlignedF1Score (BERTScore)": 0.8942792456228544, "QAAlignedF1Score (MoverScore)": 0.6373351963820404, "QAAlignedRecall (BERTScore)": 0.8941228690725108, "QAAlignedPrecision (BERTScore)": 0.89444757811698, "QAAlignedRecall (MoverScore)": 0.6371821518091849, "QAAlignedPrecision (MoverScore)": 0.6375008782072481, "Bleu_1": 0.3984300954789785, "Bleu_2": 0.2658502588742567, "Bleu_3": 0.18281605727500605, "Bleu_4": 0.12770726746050978, "METEOR": 0.3757509601817376, "ROUGE_L": 0.42765835048306905, "BERTScore": 0.8940550432003671, "MoverScore": 0.6355630625706891}, "validation": {"QAAlignedF1Score (BERTScore)": 0.8926300790589307, "QAAlignedF1Score (MoverScore)": 0.63059397194842, "QAAlignedRecall (BERTScore)": 0.8913472192262684, "QAAlignedPrecision (BERTScore)": 0.8939639724466093, "QAAlignedRecall (MoverScore)": 0.629089058243695, "QAAlignedPrecision (MoverScore)": 0.6321579412618986, "Bleu_1": 0.4640246177231955, "Bleu_2": 0.3439248438621679, "Bleu_3": 0.26338751893612494, "Bleu_4": 0.20675383700833067, "METEOR": 0.38402259850823106, "ROUGE_L": 0.45018832938371395, "BERTScore": 0.8746826552110533, "MoverScore": 0.6132905937302482}}