"""Gradio demo: translate English text to Romanian with a fine-tuned opus-mt-en-ro model."""

import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Load the fine-tuned English-to-Romanian model and its matching tokenizer from the Hub.
model = AutoModelForSeq2SeqLM.from_pretrained("Kyudan/opus-mt-en-ro-finetuned-en-to-ro")
tokenizer = AutoTokenizer.from_pretrained("Kyudan/opus-mt-en-ro-finetuned-en-to-ro")


def respond(text):
    # Tokenize the English input into tensor IDs the model expects.
    inputs = tokenizer.encode(text, return_tensors="pt")
    # Generate the translated token IDs with the model's default generation settings.
    outputs = model.generate(inputs)
    # Decode the generated IDs back into a plain string, dropping special tokens.
    translated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return translated_text
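

# A hedged variant (not part of the original app): the same translation step with
# explicit generation settings. num_beams and max_new_tokens are standard
# transformers generate() keyword arguments; the values here are illustrative only.
def respond_with_beams(text, num_beams=4, max_new_tokens=128):
    inputs = tokenizer.encode(text, return_tensors="pt")
    outputs = model.generate(inputs, num_beams=num_beams, max_new_tokens=max_new_tokens)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)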


# Minimal Gradio UI: one text box in, the translated text out.
demo = gr.Interface(
    fn=respond,
    inputs="text",
    outputs="text",
    title="Translate English to Romanian",
)
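
# A hedged alternative (assumption, not in the original): explicit Textbox components
# give labeled, multi-line fields; behavior is otherwise the same as the string shorthand.
# demo = gr.Interface(
#     fn=respond,
#     inputs=gr.Textbox(lines=3, label="English text"),
#     outputs=gr.Textbox(label="Romanian translation"),
#     title="Translate English to Romanian",
# )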

if __name__ == "__main__":
    # share=True also creates a temporary public URL in addition to the local server.
    demo.launch(share=True)