import gradio as gr
from transformers import GPT2LMHeadModel, PreTrainedTokenizerFast

# Load the Korean GPT-2 model (KoGPT2) and its tokenizer from the Hugging Face Hub.
# The special-token arguments follow the skt/kogpt2-base-v2 model card so that
# generation and decoding use the tokens the model was trained with.
model = GPT2LMHeadModel.from_pretrained("skt/kogpt2-base-v2")
tokenizer = PreTrainedTokenizerFast.from_pretrained(
    "skt/kogpt2-base-v2",
    bos_token="</s>",
    eos_token="</s>",
    unk_token="<unk>",
    pad_token="<pad>",
    mask_token="<mask>",
)


def generate_diary(keywords):
    """Generate a short diary entry from comma-separated keywords."""
    # Korean prompt, roughly: "Today's diary:\n\nLet's write a diary about <keywords>."
    keyword_list = ", ".join(k.strip() for k in keywords.split(","))
    prompt = f"오늘의 일기:\n\n{keyword_list}에 대한 일기를 써봅시다."

    input_ids = tokenizer.encode(prompt, return_tensors="pt")
    output = model.generate(
        input_ids,
        max_length=500,
        num_return_sequences=1,
        do_sample=True,
        top_k=50,
        top_p=0.95,
        num_beams=5,
        no_repeat_ngram_size=2,
        pad_token_id=tokenizer.pad_token_id,
    )

    diary = tokenizer.decode(output[0], skip_special_tokens=True)
    return diary


def app():
    # Build the Gradio UI: keyword input, a generate button, and an output box.
    with gr.Blocks() as demo:
        gr.Markdown("# 자동 일기 생성기")  # "Automatic diary generator"

        with gr.Row():
            # Label: "Enter 5 keywords (separated by commas)"
            keywords = gr.Textbox(label="5개의 키워드를 입력하세요 (쉼표로 구분)")
            generate_btn = gr.Button("일기 쓰기")  # "Write a diary"

        diary = gr.Textbox(label="생성된 일기")  # "Generated diary"

        generate_btn.click(generate_diary, inputs=keywords, outputs=diary)

    demo.launch()


if __name__ == "__main__":
    app()
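
# Example usage (assumed): run this script, open the local URL Gradio prints,
# type five comma-separated keywords such as "바다, 친구, 노을, 라면, 산책"
# (a hypothetical input), and click "일기 쓰기" to receive a generated diary entry.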