import ast
import os

import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM

# Access token for the model repository (presumably gated or private),
# read from the environment; on Spaces this would be set as a repository secret.
AUTH_TOKEN = os.environ.get("TOKEN")

model = AutoModelForSeq2SeqLM.from_pretrained('bowphs/ancient-t5-translation', use_auth_token=AUTH_TOKEN)
tokenizer = AutoTokenizer.from_pretrained('bowphs/ancient-t5-translation', use_auth_token=AUTH_TOKEN)
generator = pipeline('text2text-generation', model=model, tokenizer=tokenizer)
def generate(text, generation_args):
    # Parse a comma-separated "key=value" string (e.g. "do_sample=False, num_beams=3")
    # into keyword arguments for the generation pipeline.
    arguments = {}
    if generation_args:
        pairs = generation_args.split(",")
        for pair in pairs:
            key, value = pair.strip().split('=')
            # literal_eval converts values such as False, 3 or 0.9; it is the
            # safer choice over eval for user-supplied input.
            arguments[key] = ast.literal_eval(value)
    result = generator(text, max_length=30, num_return_sequences=1, **arguments)
    return result[0]["generated_text"]
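
# Illustrative direct call (not part of the original app; the prompt text is a
# made-up example): the second argument is parsed into
# {"do_sample": False, "num_beams": 5} and forwarded to the pipeline.
# generate("translate english to latin: the sailor loves the girl.",
#          "do_sample=False, num_beams=5")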

examples = [
    ["translate english to latin: and he took the sword and killed the man."],
    ["translate english to greek: and he took the sword and killed the man."],
]

demo = gr.Interface(
    fn=generate,
    inputs=[
        gr.components.Textbox(value="translate greek to english: ὁ ἄνθρωπος τὸν οἶνον πίνειν ἐθέλων τὸν κρατῆρα ἔλαβεν.", label="Input Text"),
        gr.components.Textbox(value="do_sample=False, num_beams=3", label="Generation Parameters"),
    ],
    outputs=gr.components.Textbox(value="the man took the bowl with the intention of drinking wine.", label="Generated Text"),
    examples=examples,
)

demo.launch()
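
# To run this locally (assumption: the script is saved as app.py and the gradio
# and transformers packages are installed), export the access token first, e.g.:
#   TOKEN=<your_hf_token> python app.py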