mt5-small-esquad-qg-ae / eval /metric.long.sentence.sentence_answer.question.asahi417_qg_esquad.default.json
{"validation": {"Bleu_1": 0.2180783639897447, "Bleu_2": 0.13929892821740028, "Bleu_3": 0.0978409264908257, "Bleu_4": 0.07187742725097669, "METEOR": 0.1823521229481214, "ROUGE_L": 0.19533661187534526, "BERTScore": 0.8122137230333832, "MoverScore": 0.5614895505661768}, "test": {"Bleu_1": 0.20612920747840227, "Bleu_2": 0.12997044618806985, "Bleu_3": 0.09016879036636664, "Bleu_4": 0.06546735879123995, "METEOR": 0.17832982014314605, "ROUGE_L": 0.1881253700945325, "BERTScore": 0.8121980425047695, "MoverScore": 0.5613964751904779}}